[ 606.640795] env[69734]: Modules with known eventlet monkey patching issues were imported prior to eventlet monkey patching: urllib3. This warning can usually be ignored if the caller is only importing and not executing nova code.
[ 607.284928] env[69784]: Modules with known eventlet monkey patching issues were imported prior to eventlet monkey patching: urllib3. This warning can usually be ignored if the caller is only importing and not executing nova code.
[ 608.619546] env[69784]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=69784) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 608.619902] env[69784]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=69784) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 608.620048] env[69784]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=69784) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 608.620308] env[69784]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 608.819188] env[69784]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=69784) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:384}}
[ 608.829775] env[69784]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.011s {{(pid=69784) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:422}}
[ 608.932894] env[69784]: INFO nova.virt.driver [None req-486e72f5-6c4c-407a-ac39-ee19fdc705c2 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 609.005854] env[69784]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 609.006032] env[69784]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 609.006141] env[69784]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=69784) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 611.921114] env[69784]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-c308920f-eb99-40d6-91f0-e815f5ce39f1 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 611.937705] env[69784]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=69784) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 611.937938] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-5ab5ff5b-bcb7-43cd-a110-98dc9206c0b4 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 611.970240] env[69784]: INFO oslo_vmware.api [-] Successfully established new session; session ID is be529.
[ 611.970422] env[69784]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 2.964s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 611.970909] env[69784]: INFO nova.virt.vmwareapi.driver [None req-486e72f5-6c4c-407a-ac39-ee19fdc705c2 None None] VMware vCenter version: 7.0.3
[ 611.974255] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36bf7245-3373-48d8-90b2-3c7253db0753 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 611.994205] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29a2268a-a67f-4f4a-99ec-76ad6fa9926d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 612.001084] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1281686-9f0d-49bb-9ec3-8af2622b8e66 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 612.008518] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5057cf9-c19e-4767-883a-9d48b71d1008 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 612.021586] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e66bf80-f5a7-45ed-a281-e8a9d20d493d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 612.027414] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6394e77-ead2-4b84-be86-b3e6519c4295 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 612.057563] env[69784]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-f78203ee-6c91-4b40-bcf0-3a92b67004d0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 612.062459] env[69784]: DEBUG nova.virt.vmwareapi.driver [None req-486e72f5-6c4c-407a-ac39-ee19fdc705c2 None None] Extension org.openstack.compute already exists. {{(pid=69784) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:224}}
[ 612.065009] env[69784]: INFO nova.compute.provider_config [None req-486e72f5-6c4c-407a-ac39-ee19fdc705c2 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 612.082860] env[69784]: DEBUG nova.context [None req-486e72f5-6c4c-407a-ac39-ee19fdc705c2 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),e6b61205-27d8-4cd1-8031-7d94a86a013c(cell1) {{(pid=69784) load_cells /opt/stack/nova/nova/context.py:464}}
[ 612.084823] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 612.085031] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 612.085725] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 612.086156] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] Acquiring lock "e6b61205-27d8-4cd1-8031-7d94a86a013c" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 612.086346] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] Lock "e6b61205-27d8-4cd1-8031-7d94a86a013c" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 612.087364] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] Lock "e6b61205-27d8-4cd1-8031-7d94a86a013c" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 612.112236] env[69784]: INFO dbcounter [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] Registered counter for database nova_cell0
[ 612.120844] env[69784]: INFO dbcounter [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] Registered counter for database nova_cell1
[ 612.123987] env[69784]: DEBUG oslo_db.sqlalchemy.engines [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=69784) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 612.124372] env[69784]: DEBUG oslo_db.sqlalchemy.engines [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=69784) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 612.128653] env[69784]: DEBUG dbcounter [-] [69784] Writer thread running {{(pid=69784) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 612.129920] env[69784]: DEBUG dbcounter [-] [69784] Writer thread running {{(pid=69784) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 612.131713] env[69784]: ERROR nova.db.main.api [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 221, in main
[ 612.131713] env[69784]: result = function(*args, **kwargs)
[ 612.131713] env[69784]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 612.131713] env[69784]: return func(*args, **kwargs)
[ 612.131713] env[69784]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 612.131713] env[69784]: result = fn(*args, **kwargs)
[ 612.131713] env[69784]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 612.131713] env[69784]: return f(*args, **kwargs)
[ 612.131713] env[69784]: File "/opt/stack/nova/nova/objects/service.py", line 548, in _db_service_get_minimum_version
[ 612.131713] env[69784]: return db.service_get_minimum_version(context, binaries)
[ 612.131713] env[69784]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 612.131713] env[69784]: _check_db_access()
[ 612.131713] env[69784]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 612.131713] env[69784]: stacktrace = ''.join(traceback.format_stack())
[ 612.131713] env[69784]:
[ 612.132743] env[69784]: ERROR nova.db.main.api [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 221, in main
[ 612.132743] env[69784]: result = function(*args, **kwargs)
[ 612.132743] env[69784]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 612.132743] env[69784]: return func(*args, **kwargs)
[ 612.132743] env[69784]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 612.132743] env[69784]: result = fn(*args, **kwargs)
[ 612.132743] env[69784]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 612.132743] env[69784]: return f(*args, **kwargs)
[ 612.132743] env[69784]: File "/opt/stack/nova/nova/objects/service.py", line 548, in _db_service_get_minimum_version
[ 612.132743] env[69784]: return db.service_get_minimum_version(context, binaries)
[ 612.132743] env[69784]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 612.132743] env[69784]: _check_db_access()
[ 612.132743] env[69784]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 612.132743] env[69784]: stacktrace = ''.join(traceback.format_stack())
[ 612.132743] env[69784]:
[ 612.133299] env[69784]: WARNING nova.objects.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] Failed to get minimum service version for cell e6b61205-27d8-4cd1-8031-7d94a86a013c
[ 612.133299] env[69784]: WARNING nova.objects.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 612.133677] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] Acquiring lock "singleton_lock" {{(pid=69784) lock
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 612.133850] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] Acquired lock "singleton_lock" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 612.134102] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] Releasing lock "singleton_lock" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 612.134430] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] Full set of CONF: {{(pid=69784) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:362}} [ 612.134574] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] ******************************************************************************** {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2589}} [ 612.134713] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] Configuration options gathered from: {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2590}} [ 612.134865] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2591}} [ 612.136082] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2592}} [ 612.136082] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] ================================================================================ {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2594}} [ 612.136082] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] allow_resize_to_same_host = True {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.136082] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] arq_binding_timeout = 300 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.136082] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] backdoor_port = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.136082] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] backdoor_socket = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.136862] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] block_device_allocate_retries = 60 {{(pid=69784) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.136862] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] block_device_allocate_retries_interval = 3 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.136862] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cert = self.pem {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.136862] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.136862] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] compute_monitors = [] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.136862] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] config_dir = [] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.137223] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] config_drive_format = iso9660 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.137223] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.137361] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] config_source = [] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.137534] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] console_host = devstack {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.137704] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] control_exchange = nova {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.137865] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cpu_allocation_ratio = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.138040] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] daemon = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.138218] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] debug = True {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.138378] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] default_access_ip_network_name = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.138544] 
env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] default_availability_zone = nova {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.138703] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] default_ephemeral_format = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.138866] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] default_green_pool_size = 1000 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.139132] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.139303] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] default_schedule_zone = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.139463] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] disk_allocation_ratio = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.139624] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] enable_new_services = True {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.139803] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] enabled_apis = ['osapi_compute'] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.139987] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] enabled_ssl_apis = [] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.140148] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] flat_injected = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.140308] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] force_config_drive = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.140468] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] force_raw_images = True {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.140635] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b 
None None] graceful_shutdown_timeout = 5 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.140795] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] heal_instance_info_cache_interval = 60 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.141024] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] host = cpu-1 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.141195] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] initial_cpu_allocation_ratio = 4.0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.141358] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] initial_disk_allocation_ratio = 1.0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.141519] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] initial_ram_allocation_ratio = 1.0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.141733] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.141897] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] instance_build_timeout = 0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.142068] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] instance_delete_interval = 300 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.142238] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] instance_format = [instance: %(uuid)s] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.142403] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] instance_name_template = instance-%08x {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.142563] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] instance_usage_audit = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.142732] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] instance_usage_audit_period = month {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.142898] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.143077] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] 
instances_path = /opt/stack/data/nova/instances {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.143248] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] internal_service_availability_zone = internal {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.143407] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] key = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.143567] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] live_migration_retry_count = 30 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.143730] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] log_config_append = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.143896] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.144069] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] log_dir = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.144233] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] log_file = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.144365] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] log_options = True {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.144527] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] log_rotate_interval = 1 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.144705] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] log_rotate_interval_type = days {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.144896] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] log_rotation_type = none {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.145042] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.145176] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.145347] env[69784]: DEBUG 
oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.145515] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.145645] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.145810] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] long_rpc_timeout = 1800 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.145972] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] max_concurrent_builds = 10 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.146144] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] max_concurrent_live_migrations = 1 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.146302] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] max_concurrent_snapshots = 5 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.146460] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] max_local_block_devices = 3 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.146616] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] max_logfile_count = 30 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.146774] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] max_logfile_size_mb = 200 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.146959] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] maximum_instance_delete_attempts = 5 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.147144] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] metadata_listen = 0.0.0.0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.147315] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] metadata_listen_port = 8775 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.147481] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] metadata_workers = 2 {{(pid=69784) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.147642] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] migrate_max_retries = -1 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.147813] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] mkisofs_cmd = genisoimage {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.148029] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] my_block_storage_ip = 10.180.1.21 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.148166] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] my_ip = 10.180.1.21 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.148333] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] network_allocate_retries = 0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.148512] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.148678] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] osapi_compute_listen = 0.0.0.0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.148840] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] osapi_compute_listen_port = 8774 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.149014] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] osapi_compute_unique_server_name_scope = {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.149191] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] osapi_compute_workers = 2 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.149357] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] password_length = 12 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.149520] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] periodic_enable = True {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.149681] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] periodic_fuzzy_delay = 60 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.149848] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] pointer_model = usbtablet {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.150026] env[69784]: 
DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] preallocate_images = none {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.150192] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] publish_errors = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.150328] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] pybasedir = /opt/stack/nova {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.150478] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] ram_allocation_ratio = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.150637] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] rate_limit_burst = 0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.150809] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] rate_limit_except_level = CRITICAL {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.150965] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] rate_limit_interval = 0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.151133] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] reboot_timeout = 0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.151291] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] reclaim_instance_interval = 0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.151448] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] record = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.151617] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] reimage_timeout_per_gb = 60 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.151782] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] report_interval = 120 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.151941] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] rescue_timeout = 0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.152114] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] reserved_host_cpus = 0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.152274] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] reserved_host_disk_mb = 0 {{(pid=69784) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.152430] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] reserved_host_memory_mb = 512 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.152587] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] reserved_huge_pages = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.152746] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] resize_confirm_window = 0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.152904] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] resize_fs_using_block_device = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.153074] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] resume_guests_state_on_host_boot = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.153247] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.153411] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] rpc_response_timeout = 60 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.153572] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] run_external_periodic_tasks = True {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.153741] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] running_deleted_instance_action = reap {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.153902] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] running_deleted_instance_poll_interval = 1800 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.154074] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] running_deleted_instance_timeout = 0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.154235] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] scheduler_instance_sync_interval = 120 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.154401] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] service_down_time = 720 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.154569] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] servicegroup_driver = db {{(pid=69784) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.154748] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] shelved_offload_time = 0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.154924] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] shelved_poll_interval = 3600 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.155107] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] shutdown_timeout = 0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.155274] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] source_is_ipv6 = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.155435] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] ssl_only = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.155684] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.155853] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] sync_power_state_interval = 600 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.156025] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] sync_power_state_pool_size = 1000 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.156199] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] syslog_log_facility = LOG_USER {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.156357] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] tempdir = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.156517] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] timeout_nbd = 10 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.156685] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] transport_url = **** {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.156873] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] update_resources_interval = 0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.157068] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] use_cow_images = True {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.157238] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b 
None None] use_eventlog = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.157399] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] use_journal = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.157556] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] use_json = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.157711] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] use_rootwrap_daemon = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.157869] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] use_stderr = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.158036] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] use_syslog = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.158196] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vcpu_pin_set = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.158364] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vif_plugging_is_fatal = True {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.158530] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vif_plugging_timeout = 300 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.158693] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] virt_mkfs = [] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.158854] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] volume_usage_poll_interval = 0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.159030] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] watch_log_file = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.159209] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] web = /usr/share/spice-html5 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 612.159399] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_concurrency.disable_process_locking = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.159693] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.159872] 
env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.160079] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.160264] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_metrics.metrics_process_name = {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.160434] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.160598] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.160778] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api.auth_strategy = keystone {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.160943] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api.compute_link_prefix = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.161132] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.161306] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api.dhcp_domain = novalocal {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.161473] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api.enable_instance_password = True {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.161639] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api.glance_link_prefix = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.161803] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.161975] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api.instance_list_cells_batch_strategy = distributed {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.162151] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] 
api.instance_list_per_project_cells = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.162317] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api.list_records_by_skipping_down_cells = True {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.162478] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api.local_metadata_per_cell = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.162645] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api.max_limit = 1000 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.162814] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api.metadata_cache_expiration = 15 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.163013] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api.neutron_default_tenant_id = default {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.163203] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api.use_forwarded_for = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.163372] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api.use_neutron_default_nets = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.163539] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.163700] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api.vendordata_dynamic_failure_fatal = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.163868] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.164055] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api.vendordata_dynamic_ssl_certfile = {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.164240] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api.vendordata_dynamic_targets = [] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.164406] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api.vendordata_jsonfile_path = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.164589] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] 
api.vendordata_providers = ['StaticJSON'] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.164810] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cache.backend = dogpile.cache.memcached {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.164987] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cache.backend_argument = **** {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.165174] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cache.config_prefix = cache.oslo {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.165344] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cache.dead_timeout = 60.0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.165509] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cache.debug_cache_backend = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.165673] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cache.enable_retry_client = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.165836] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cache.enable_socket_keepalive = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.166036] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cache.enabled = True {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.166224] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cache.expiration_time = 600 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.166393] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cache.hashclient_retry_attempts = 2 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.166559] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cache.hashclient_retry_delay = 1.0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.166724] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cache.memcache_dead_retry = 300 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.166922] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cache.memcache_password = {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.167111] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=69784) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.167281] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.167449] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cache.memcache_pool_maxsize = 10 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.167612] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cache.memcache_pool_unused_timeout = 60 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.167777] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cache.memcache_sasl_enabled = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.167958] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cache.memcache_servers = ['localhost:11211'] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.168142] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cache.memcache_socket_timeout = 1.0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.168314] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cache.memcache_username = {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.168481] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cache.proxies = [] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.168650] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cache.retry_attempts = 2 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.168817] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cache.retry_delay = 0.0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.168985] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cache.socket_keepalive_count = 1 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.169191] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cache.socket_keepalive_idle = 1 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.169360] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cache.socket_keepalive_interval = 1 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.169525] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cache.tls_allowed_ciphers = None {{(pid=69784) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.169686] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cache.tls_cafile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.169846] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cache.tls_certfile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.170019] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cache.tls_enabled = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.170183] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cache.tls_keyfile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.170353] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cinder.auth_section = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.170528] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cinder.auth_type = password {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.170694] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cinder.cafile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.170872] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cinder.catalog_info = volumev3::publicURL {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.171047] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cinder.certfile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.171247] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cinder.collect_timing = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.171383] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cinder.cross_az_attach = True {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.171547] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cinder.debug = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.171708] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cinder.endpoint_template = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.171872] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cinder.http_retries = 3 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.172058] env[69784]: DEBUG oslo_service.service [None 
req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cinder.insecure = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.172240] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cinder.keyfile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.172418] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cinder.os_region_name = RegionOne {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.172585] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cinder.split_loggers = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.172746] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cinder.timeout = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.172920] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.173096] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] compute.cpu_dedicated_set = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.173263] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] compute.cpu_shared_set = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.173429] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] compute.image_type_exclude_list = [] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.173593] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] compute.live_migration_wait_for_vif_plug = True {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.173757] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] compute.max_concurrent_disk_ops = 0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.173919] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] compute.max_disk_devices_to_attach = -1 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.174096] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.174271] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.174438] env[69784]: DEBUG oslo_service.service 
[None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] compute.resource_provider_association_refresh = 300 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.174604] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] compute.shutdown_retry_interval = 10 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.174815] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.175030] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] conductor.workers = 2 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.175221] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] console.allowed_origins = [] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.175388] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] console.ssl_ciphers = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.175565] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] console.ssl_minimum_version = default {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.175739] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] consoleauth.token_ttl = 600 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.175913] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cyborg.cafile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.176087] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cyborg.certfile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.176256] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cyborg.collect_timing = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.176417] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cyborg.connect_retries = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.176573] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cyborg.connect_retry_delay = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.176732] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cyborg.endpoint_override = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.176938] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] 
cyborg.insecure = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.177131] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cyborg.keyfile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.177335] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cyborg.max_version = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.177530] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cyborg.min_version = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.177697] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cyborg.region_name = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.177860] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cyborg.service_name = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.178043] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cyborg.service_type = accelerator {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.178255] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cyborg.split_loggers = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.178419] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cyborg.status_code_retries = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.178579] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cyborg.status_code_retry_delay = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.178737] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cyborg.timeout = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.178920] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.179107] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] cyborg.version = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.179285] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] database.backend = sqlalchemy {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.179466] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] database.connection = **** {{(pid=69784) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.179642] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] database.connection_debug = 0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.179837] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] database.connection_parameters = {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.180020] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] database.connection_recycle_time = 3600 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.180223] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] database.connection_trace = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.180420] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] database.db_inc_retry_interval = True {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.180595] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] database.db_max_retries = 20 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.180779] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] database.db_max_retry_interval = 10 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.180964] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] database.db_retry_interval = 1 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.181159] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] database.max_overflow = 50 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.181347] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] database.max_pool_size = 5 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.182074] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] database.max_retries = 10 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.182074] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] database.mysql_sql_mode = TRADITIONAL {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.182074] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] database.mysql_wsrep_sync_wait = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.182074] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] database.pool_timeout = None {{(pid=69784) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.182242] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] database.retry_interval = 10 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.182395] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] database.slave_connection = **** {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.182592] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] database.sqlite_synchronous = True {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.182760] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] database.use_db_reconnect = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.182956] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api_database.backend = sqlalchemy {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.183140] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api_database.connection = **** {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.183324] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api_database.connection_debug = 0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.183512] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api_database.connection_parameters = {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.183683] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api_database.connection_recycle_time = 3600 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.183854] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api_database.connection_trace = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.184033] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api_database.db_inc_retry_interval = True {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.184202] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api_database.db_max_retries = 20 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.184364] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api_database.db_max_retry_interval = 10 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.184527] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api_database.db_retry_interval = 1 {{(pid=69784) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.184698] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api_database.max_overflow = 50 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.184864] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api_database.max_pool_size = 5 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.185042] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api_database.max_retries = 10 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.185219] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.185380] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api_database.mysql_wsrep_sync_wait = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.185541] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api_database.pool_timeout = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.185710] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api_database.retry_interval = 10 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.185873] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api_database.slave_connection = **** {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.186049] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] api_database.sqlite_synchronous = True {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.186227] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] devices.enabled_mdev_types = [] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.186428] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.186602] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] ephemeral_storage_encryption.enabled = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.186773] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] ephemeral_storage_encryption.key_size = 512 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.186973] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] glance.api_servers = None {{(pid=69784) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.187159] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] glance.cafile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.187324] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] glance.certfile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.187487] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] glance.collect_timing = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.187647] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] glance.connect_retries = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.187814] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] glance.connect_retry_delay = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.187993] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] glance.debug = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.188178] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] glance.default_trusted_certificate_ids = [] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.188343] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] glance.enable_certificate_validation = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.188506] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] glance.enable_rbd_download = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.188664] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] glance.endpoint_override = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.188830] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] glance.insecure = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.188991] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] glance.keyfile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.189165] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] glance.max_version = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.189328] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] glance.min_version = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.189520] env[69784]: DEBUG 
oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] glance.num_retries = 3 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.189696] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] glance.rbd_ceph_conf = {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.189862] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] glance.rbd_connect_timeout = 5 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.190044] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] glance.rbd_pool = {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.190217] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] glance.rbd_user = {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.190379] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] glance.region_name = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.190539] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] glance.service_name = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.190707] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] glance.service_type = image {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.190875] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] glance.split_loggers = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.191048] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] glance.status_code_retries = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.191214] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] glance.status_code_retry_delay = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.191387] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] glance.timeout = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.191555] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.191723] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] glance.verify_glance_signatures = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.191882] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] glance.version = None {{(pid=69784) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.192059] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] guestfs.debug = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.192234] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] hyperv.config_drive_cdrom = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.192408] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] hyperv.config_drive_inject_password = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.192594] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] hyperv.dynamic_memory_ratio = 1.0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.192760] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] hyperv.enable_instance_metrics_collection = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.192924] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] hyperv.enable_remotefx = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.193107] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] hyperv.instances_path_share = {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.193276] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] hyperv.iscsi_initiator_list = [] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.193439] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] hyperv.limit_cpu_features = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.193604] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] hyperv.mounted_disk_query_retry_count = 10 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.193767] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] hyperv.mounted_disk_query_retry_interval = 5 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.193926] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] hyperv.power_state_check_timeframe = 60 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.194108] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] hyperv.power_state_event_polling_interval = 2 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.194281] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] hyperv.qemu_img_cmd = qemu-img.exe {{(pid=69784) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.194443] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] hyperv.use_multipath_io = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.194607] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] hyperv.volume_attach_retry_count = 10 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.194769] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] hyperv.volume_attach_retry_interval = 5 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.194955] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] hyperv.vswitch_name = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.195139] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] hyperv.wait_soft_reboot_seconds = 60 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.195312] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] mks.enabled = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.195671] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.195862] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] image_cache.manager_interval = 2400 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.196047] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] image_cache.precache_concurrency = 1 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.196224] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] image_cache.remove_unused_base_images = True {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.196398] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.196565] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.196740] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] image_cache.subdirectory_name = _base {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.196946] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] ironic.api_max_retries 
= 60 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.197136] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] ironic.api_retry_interval = 2 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.197300] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] ironic.auth_section = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.197466] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] ironic.auth_type = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.197626] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] ironic.cafile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.197790] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] ironic.certfile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.197995] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] ironic.collect_timing = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.198186] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] ironic.conductor_group = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.198347] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] ironic.connect_retries = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.198510] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] ironic.connect_retry_delay = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.198671] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] ironic.endpoint_override = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.198835] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] ironic.insecure = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.198994] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] ironic.keyfile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.199171] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] ironic.max_version = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.199330] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] ironic.min_version = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.199498] env[69784]: DEBUG 
oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] ironic.peer_list = [] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.199658] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] ironic.region_name = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.199823] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] ironic.serial_console_state_timeout = 10 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.199985] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] ironic.service_name = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.200169] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] ironic.service_type = baremetal {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.200335] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] ironic.split_loggers = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.200493] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] ironic.status_code_retries = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.200653] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] ironic.status_code_retry_delay = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.201250] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] ironic.timeout = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.201250] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.201250] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] ironic.version = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.201385] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.201530] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] key_manager.fixed_key = **** {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.201707] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.201875] env[69784]: DEBUG oslo_service.service [None 
req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] barbican.barbican_api_version = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.202047] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] barbican.barbican_endpoint = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.202223] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] barbican.barbican_endpoint_type = public {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.202387] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] barbican.barbican_region_name = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.202547] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] barbican.cafile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.202707] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] barbican.certfile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.202870] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] barbican.collect_timing = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.203042] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] barbican.insecure = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.203206] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] barbican.keyfile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.203369] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] barbican.number_of_retries = 60 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.203528] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] barbican.retry_delay = 1 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.203689] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] barbican.send_service_user_token = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.203851] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] barbican.split_loggers = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.204049] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] barbican.timeout = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.204225] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] barbican.verify_ssl = True {{(pid=69784) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.204388] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] barbican.verify_ssl_path = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.204554] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] barbican_service_user.auth_section = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.204717] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] barbican_service_user.auth_type = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.204879] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] barbican_service_user.cafile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.205050] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] barbican_service_user.certfile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.205219] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] barbican_service_user.collect_timing = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.205384] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] barbican_service_user.insecure = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.205543] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] barbican_service_user.keyfile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.205706] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] barbican_service_user.split_loggers = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.205864] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] barbican_service_user.timeout = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.206060] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vault.approle_role_id = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.206236] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vault.approle_secret_id = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.206397] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vault.cafile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.206557] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vault.certfile = None {{(pid=69784) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.206723] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vault.collect_timing = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.206912] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vault.insecure = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.207108] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vault.keyfile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.207291] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vault.kv_mountpoint = secret {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.207453] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vault.kv_path = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.207618] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vault.kv_version = 2 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.207777] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vault.namespace = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.207939] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vault.root_token_id = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.208120] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vault.split_loggers = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.208284] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vault.ssl_ca_crt_file = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.208443] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vault.timeout = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.208605] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vault.use_ssl = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.208777] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.208948] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] keystone.auth_section = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.209127] env[69784]: DEBUG oslo_service.service [None 
req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] keystone.auth_type = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.209292] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] keystone.cafile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.209452] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] keystone.certfile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.209616] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] keystone.collect_timing = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.209774] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] keystone.connect_retries = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.209945] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] keystone.connect_retry_delay = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.210137] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] keystone.endpoint_override = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.210307] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] keystone.insecure = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.210468] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] keystone.keyfile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.210628] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] keystone.max_version = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.210788] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] keystone.min_version = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.210945] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] keystone.region_name = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.211118] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] keystone.service_name = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.211294] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] keystone.service_type = identity {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.211457] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] keystone.split_loggers = False {{(pid=69784) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.211617] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] keystone.status_code_retries = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.211774] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] keystone.status_code_retry_delay = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.211931] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] keystone.timeout = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.212123] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.212283] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] keystone.version = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.212485] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.connection_uri = {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.212650] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.cpu_mode = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.212820] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.cpu_model_extra_flags = [] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.213013] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.cpu_models = [] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.213207] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.cpu_power_governor_high = performance {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.213382] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.cpu_power_governor_low = powersave {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.213547] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.cpu_power_management = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.213717] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.213885] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.device_detach_attempts = 8 {{(pid=69784) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.214060] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.device_detach_timeout = 20 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.214232] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.disk_cachemodes = [] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.214394] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.disk_prefix = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.214562] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.enabled_perf_events = [] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.214729] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.file_backed_memory = 0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.214897] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.gid_maps = [] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.215072] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.hw_disk_discard = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.215234] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.hw_machine_type = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.215407] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.images_rbd_ceph_conf = {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.215574] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.215742] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.215914] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.images_rbd_glance_store_name = {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.216122] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.images_rbd_pool = rbd {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.216304] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.images_type = default {{(pid=69784) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.216468] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.images_volume_group = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.216632] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.inject_key = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.216800] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.inject_partition = -2 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.216985] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.inject_password = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.217167] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.iscsi_iface = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.217330] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.iser_use_multipath = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.217494] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.live_migration_bandwidth = 0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.217655] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.live_migration_completion_timeout = 800 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.217834] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.live_migration_downtime = 500 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.218024] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.live_migration_downtime_delay = 75 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.218207] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.live_migration_downtime_steps = 10 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.218375] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.live_migration_inbound_addr = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.218543] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.live_migration_permit_auto_converge = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.218708] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.live_migration_permit_post_copy = False {{(pid=69784) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.218874] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.live_migration_scheme = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.219082] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.live_migration_timeout_action = abort {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.219270] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.live_migration_tunnelled = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.219435] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.live_migration_uri = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.219599] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.live_migration_with_native_tls = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.219758] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.max_queues = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.219923] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.mem_stats_period_seconds = 10 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.220099] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.nfs_mount_options = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.220410] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.220586] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.num_aoe_discover_tries = 3 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.220752] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.num_iser_scan_tries = 5 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.220911] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.num_memory_encrypted_guests = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.221086] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.num_nvme_discover_tries = 5 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.221252] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.num_pcie_ports = 0 
{{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.221416] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.num_volume_scan_tries = 5 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.221581] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.pmem_namespaces = [] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.221742] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.quobyte_client_cfg = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.222042] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.222241] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.rbd_connect_timeout = 5 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.222414] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.222582] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.222748] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.rbd_secret_uuid = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.222909] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.rbd_user = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.223089] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.realtime_scheduler_priority = 1 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.223266] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.remote_filesystem_transport = ssh {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.223429] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.rescue_image_id = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.223590] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.rescue_kernel_id = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.223752] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.rescue_ramdisk_id = None {{(pid=69784) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.223924] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.rng_dev_path = /dev/urandom {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.224098] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.rx_queue_size = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.224269] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.smbfs_mount_options = {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.224545] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.224744] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.snapshot_compression = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.224931] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.snapshot_image_format = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.225177] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.225345] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.sparse_logical_volumes = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.225512] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.swtpm_enabled = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.225683] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.swtpm_group = tss {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.225853] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.swtpm_user = tss {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.226036] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.sysinfo_serial = unique {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.226201] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.tb_cache_size = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.226359] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.tx_queue_size = None {{(pid=69784) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.226526] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.uid_maps = [] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.226690] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.use_virtio_for_bridges = True {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.226896] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.virt_type = kvm {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.227088] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.volume_clear = zero {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.227257] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.volume_clear_size = 0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.227427] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.volume_use_multipath = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.227589] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.vzstorage_cache_path = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.227760] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.227967] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.vzstorage_mount_group = qemu {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.228162] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.vzstorage_mount_opts = [] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.228337] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.228613] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.228792] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.vzstorage_mount_user = stack {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.228960] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=69784) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.229148] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] neutron.auth_section = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.229323] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] neutron.auth_type = password {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.229487] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] neutron.cafile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.229651] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] neutron.certfile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.229815] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] neutron.collect_timing = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.229975] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] neutron.connect_retries = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.230147] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] neutron.connect_retry_delay = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.230320] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] neutron.default_floating_pool = public {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.230482] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] neutron.endpoint_override = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.230650] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] neutron.extension_sync_interval = 600 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.230811] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] neutron.http_retries = 3 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.231010] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] neutron.insecure = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.231202] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] neutron.keyfile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.231367] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] neutron.max_version = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.231541] env[69784]: 
DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] neutron.metadata_proxy_shared_secret = **** {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.231700] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] neutron.min_version = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.231871] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] neutron.ovs_bridge = br-int {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.232050] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] neutron.physnets = [] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.232225] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] neutron.region_name = RegionOne {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.232394] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] neutron.service_metadata_proxy = True {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.232558] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] neutron.service_name = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.232728] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] neutron.service_type = network {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.232890] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] neutron.split_loggers = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.233060] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] neutron.status_code_retries = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.233222] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] neutron.status_code_retry_delay = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.233380] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] neutron.timeout = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.233561] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.233720] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] neutron.version = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.233905] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None 
None] notifications.bdms_in_notifications = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.234111] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] notifications.default_level = INFO {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.234293] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] notifications.notification_format = unversioned {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.234458] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] notifications.notify_on_state_change = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.234634] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.234815] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] pci.alias = [] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.234984] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] pci.device_spec = [] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.235162] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] pci.report_in_placement = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.235334] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.auth_section = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.235505] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.auth_type = password {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.235674] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.auth_url = http://10.180.1.21/identity {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.235837] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.cafile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.235998] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.certfile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.236175] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.collect_timing = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.236338] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] 
placement.connect_retries = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.236499] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.connect_retry_delay = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.236656] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.default_domain_id = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.236832] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.default_domain_name = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.237032] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.domain_id = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.237208] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.domain_name = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.237367] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.endpoint_override = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.237529] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.insecure = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.237688] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.keyfile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.237866] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.max_version = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.238050] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.min_version = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.238224] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.password = **** {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.238387] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.project_domain_id = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.238553] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.project_domain_name = Default {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.238719] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.project_id = None {{(pid=69784) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.238892] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.project_name = service {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.239075] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.region_name = RegionOne {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.239240] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.service_name = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.239409] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.service_type = placement {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.239574] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.split_loggers = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.239733] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.status_code_retries = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.239890] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.status_code_retry_delay = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.240092] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.system_scope = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.240262] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.timeout = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.240424] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.trust_id = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.240586] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.user_domain_id = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.240752] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.user_domain_name = Default {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.240912] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.user_id = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.241095] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.username = placement {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 
612.241280] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.241449] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] placement.version = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.241625] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] quota.cores = 20 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.241791] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] quota.count_usage_from_placement = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.241965] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.242209] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] quota.injected_file_content_bytes = 10240 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.242395] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] quota.injected_file_path_length = 255 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.242567] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] quota.injected_files = 5 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.242736] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] quota.instances = 10 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.242902] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] quota.key_pairs = 100 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.243109] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] quota.metadata_items = 128 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.243289] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] quota.ram = 51200 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.243457] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] quota.recheck_quota = True {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.243626] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] quota.server_group_members = 10 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.243793] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None 
None] quota.server_groups = 10 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.243964] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] rdp.enabled = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.244285] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] rdp.html5_proxy_base_url = http://127.0.0.1:6083/ {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.244471] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.244639] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.244806] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] scheduler.image_metadata_prefilter = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.244972] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.245152] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] scheduler.max_attempts = 3 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.245320] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] scheduler.max_placement_results = 1000 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.245487] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.245652] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] scheduler.query_placement_for_image_type_support = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.245817] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.246005] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] scheduler.workers = 2 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.246208] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 
612.246387] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.246569] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.246742] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.246934] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.247121] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.247291] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.247482] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.247652] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] filter_scheduler.host_subset_size = 1 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.247833] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.248070] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] filter_scheduler.image_properties_default_architecture = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.248255] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.248424] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] filter_scheduler.isolated_hosts = [] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.248589] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] filter_scheduler.isolated_images = [] 
{{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.248751] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] filter_scheduler.max_instances_per_host = 50 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.248913] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.249109] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.249292] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] filter_scheduler.pci_in_placement = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.249456] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.249616] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.249784] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.249945] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.250127] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.250296] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.250458] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] filter_scheduler.track_instance_changes = True {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.250633] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.250802] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] metrics.required = True {{(pid=69784) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.250967] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] metrics.weight_multiplier = 1.0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.251152] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] metrics.weight_of_unavailable = -10000.0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.251314] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] metrics.weight_setting = [] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.251608] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.251783] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] serial_console.enabled = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.251968] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] serial_console.port_range = 10000:20000 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.252173] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.252355] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.252531] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] serial_console.serialproxy_port = 6083 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.252704] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] service_user.auth_section = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.252879] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] service_user.auth_type = password {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.253057] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] service_user.cafile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.253224] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] service_user.certfile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.253390] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] service_user.collect_timing = False {{(pid=69784) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.253553] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] service_user.insecure = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.253713] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] service_user.keyfile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.253898] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] service_user.send_service_user_token = True {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.254074] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] service_user.split_loggers = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.254234] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] service_user.timeout = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.254405] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] spice.agent_enabled = True {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.254568] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] spice.enabled = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.254881] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.255097] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] spice.html5proxy_host = 0.0.0.0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.255277] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] spice.html5proxy_port = 6082 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.255446] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] spice.image_compression = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.255609] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] spice.jpeg_compression = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.255770] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] spice.playback_compression = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.255946] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] spice.server_listen = 127.0.0.1 {{(pid=69784) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.256134] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.256301] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] spice.streaming_mode = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.256462] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] spice.zlib_compression = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.256629] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] upgrade_levels.baseapi = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.256793] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] upgrade_levels.cert = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.256985] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] upgrade_levels.compute = auto {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.257166] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] upgrade_levels.conductor = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.257327] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] upgrade_levels.scheduler = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.257496] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vendordata_dynamic_auth.auth_section = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.257658] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vendordata_dynamic_auth.auth_type = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.257837] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vendordata_dynamic_auth.cafile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.258047] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vendordata_dynamic_auth.certfile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.258227] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vendordata_dynamic_auth.collect_timing = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.258391] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vendordata_dynamic_auth.insecure = False {{(pid=69784) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.258552] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vendordata_dynamic_auth.keyfile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.258713] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vendordata_dynamic_auth.split_loggers = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.258872] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vendordata_dynamic_auth.timeout = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.259059] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vmware.api_retry_count = 10 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.259227] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vmware.ca_file = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.259402] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vmware.cache_prefix = devstack-image-cache {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.259573] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vmware.cluster_name = testcl1 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.259739] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vmware.connection_pool_size = 10 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.259902] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vmware.console_delay_seconds = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.260090] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vmware.datastore_regex = ^datastore.* {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.260299] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.260475] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vmware.host_password = **** {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.260645] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vmware.host_port = 443 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.260817] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vmware.host_username = administrator@vsphere.local {{(pid=69784) log_opt_values 
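vmware.host_password above is logged as **** (as are profiler.hmac_keys and oslo_messaging_notifications.transport_url further down) because oslo.config masks any option registered with secret=True when dumping values. A small illustration with a made-up option definition, not Nova's actual one:

    import logging

    from oslo_config import cfg

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger(__name__)

    CONF = cfg.ConfigOpts()
    CONF.register_opts(
        [
            cfg.StrOpt('host_ip'),
            cfg.StrOpt('host_password', secret=True),  # secret opts are masked in dumps
        ],
        group='vmware',
    )
    CONF([])
    CONF.set_override('host_password', 'not-a-real-password', group='vmware')

    # The dump line reads "vmware.host_password = ****", never the real value.
    CONF.log_opt_values(LOG, logging.DEBUG)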
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.261063] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vmware.insecure = True {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.261253] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vmware.integration_bridge = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.261421] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vmware.maximum_objects = 100 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.261583] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vmware.pbm_default_policy = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.261750] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vmware.pbm_enabled = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.261911] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vmware.pbm_wsdl_location = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.262094] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.262262] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vmware.serial_port_proxy_uri = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.262422] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vmware.serial_port_service_uri = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.262589] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vmware.task_poll_interval = 0.5 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.262761] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vmware.use_linked_clone = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.262931] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vmware.vnc_keymap = en-us {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.263112] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vmware.vnc_port = 5900 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.263278] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vmware.vnc_port_total = 10000 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.263466] 
env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vnc.auth_schemes = ['none'] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.263643] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vnc.enabled = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.263941] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.264172] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.264355] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vnc.novncproxy_port = 6080 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.264537] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vnc.server_listen = 127.0.0.1 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.264715] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.264906] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vnc.vencrypt_ca_certs = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.265096] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vnc.vencrypt_client_cert = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.265266] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vnc.vencrypt_client_key = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.265443] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.265609] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] workarounds.disable_deep_image_inspection = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.265773] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] workarounds.disable_fallback_pcpu_query = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.265936] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] workarounds.disable_group_policy_check_upcall = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 
612.266113] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.266280] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] workarounds.disable_rootwrap = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.266441] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] workarounds.enable_numa_live_migration = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.266600] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.266760] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.266951] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] workarounds.handle_virt_lifecycle_events = True {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.267157] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] workarounds.libvirt_disable_apic = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.267332] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] workarounds.never_download_image_if_on_rbd = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.267499] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.267665] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.267828] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.268024] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.268203] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.268367] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None 
None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.268530] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.268694] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.268862] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.269064] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.269240] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] wsgi.client_socket_timeout = 900 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.269408] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] wsgi.default_pool_size = 1000 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.269575] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] wsgi.keep_alive = True {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.269745] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] wsgi.max_header_line = 16384 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.269911] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] wsgi.secure_proxy_ssl_header = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.270107] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] wsgi.ssl_ca_file = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.270290] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] wsgi.ssl_cert_file = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.270456] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] wsgi.ssl_key_file = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.270624] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] wsgi.tcp_keepidle = 600 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.270800] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] 
wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.270971] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] zvm.ca_file = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.271149] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] zvm.cloud_connector_url = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.271433] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.271606] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] zvm.reachable_timeout = 300 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.271788] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_policy.enforce_new_defaults = True {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.271959] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_policy.enforce_scope = True {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.272150] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_policy.policy_default_rule = default {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.272333] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.272510] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_policy.policy_file = policy.yaml {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.272683] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.272850] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.273024] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.273219] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=69784) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.273390] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.273562] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.273738] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.273913] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] profiler.connection_string = messaging:// {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.274094] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] profiler.enabled = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.274266] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] profiler.es_doc_type = notification {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.274434] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] profiler.es_scroll_size = 10000 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.274603] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] profiler.es_scroll_time = 2m {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.274765] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] profiler.filter_error_trace = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.274933] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] profiler.hmac_keys = **** {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.275116] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] profiler.sentinel_service_name = mymaster {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.275284] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] profiler.socket_timeout = 0.1 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.275445] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] profiler.trace_requests = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.275610] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] profiler.trace_sqlalchemy = False {{(pid=69784) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.275795] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] profiler_jaeger.process_tags = {} {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.275962] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] profiler_jaeger.service_name_prefix = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.276157] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] profiler_otlp.service_name_prefix = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.276361] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] remote_debug.host = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.276574] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] remote_debug.port = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.276762] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.276960] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.277156] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.277325] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.277493] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.277657] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.277822] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.277989] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_rabbit.heartbeat_rate = 2 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.278166] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] 
oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.278332] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.278563] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.278747] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.278925] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.279109] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.279278] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.279457] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.279623] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.279788] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.279955] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.280137] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.280302] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.280470] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 
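Every entry in this dump has the same shape (timestamp, request context, group.option = value, then a {{(pid=...) log_opt_values ...}} suffix), so a captured log can be folded back into a {group: {option: value}} mapping for auditing or for diffing two service runs. A rough sketch, assuming the log has been saved to a plain-text file whose name is hypothetical:

    import re
    from collections import defaultdict

    # Matches "group.option = value" between the request context and the
    # trailing "{{(pid=...) log_opt_values ...}}" suffix of each dump entry.
    OPT_RE = re.compile(
        r'None None\]\s+(?P<group>\w+)\.(?P<opt>\w+) = (?P<value>.*?) ?\{\{\(pid='
    )

    def parse_opt_dump(path):
        """Collect {group: {option: value}} from an oslo.config start-up dump."""
        opts = defaultdict(dict)
        with open(path, encoding='utf-8') as fh:
            text = fh.read().replace('\n', ' ')  # entries may wrap across lines
        for match in OPT_RE.finditer(text):
            opts[match.group('group')][match.group('opt')] = match.group('value')
        return opts

    # Example usage (file name is hypothetical):
    # dump = parse_opt_dump('n-cpu.log')
    # print(dump['filter_scheduler']['enabled_filters'])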
{{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.280634] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.280804] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.280970] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.281153] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_rabbit.ssl = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.281331] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.281537] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.281706] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.281882] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.282068] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_rabbit.ssl_version = {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.282263] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.282436] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_notifications.retry = -1 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.282623] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.282799] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_messaging_notifications.transport_url = **** {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.282982] env[69784]: DEBUG oslo_service.service 
[None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_limit.auth_section = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.283153] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_limit.auth_type = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.283314] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_limit.cafile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.283481] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_limit.certfile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.283645] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_limit.collect_timing = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.283803] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_limit.connect_retries = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.283961] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_limit.connect_retry_delay = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.284131] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_limit.endpoint_id = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.284291] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_limit.endpoint_override = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.284469] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_limit.insecure = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.284640] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_limit.keyfile = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.284802] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_limit.max_version = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.284959] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_limit.min_version = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.285130] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_limit.region_name = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.285292] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_limit.service_name = None {{(pid=69784) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.285449] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_limit.service_type = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.285612] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_limit.split_loggers = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.285771] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_limit.status_code_retries = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.285933] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_limit.status_code_retry_delay = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.286104] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_limit.timeout = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.286264] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_limit.valid_interfaces = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.286421] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_limit.version = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.286587] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_reports.file_event_handler = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.286753] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_reports.file_event_handler_interval = 1 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.286944] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] oslo_reports.log_dir = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.287136] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.287321] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vif_plug_linux_bridge_privileged.group = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.287506] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.287687] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vif_plug_linux_bridge_privileged.logger_name = 
oslo_privsep.daemon {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.287855] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.288059] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vif_plug_linux_bridge_privileged.user = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.288244] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.288409] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vif_plug_ovs_privileged.group = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.288572] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vif_plug_ovs_privileged.helper_command = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.288738] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.288905] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.289078] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] vif_plug_ovs_privileged.user = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.289254] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] os_vif_linux_bridge.flat_interface = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.289435] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.289613] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.289790] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.289966] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.290159] 
env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.290333] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.290521] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] os_vif_linux_bridge.vlan_interface = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.290717] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.290892] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] os_vif_ovs.isolate_vif = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.291076] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.291250] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.291421] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.291593] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] os_vif_ovs.ovsdb_interface = native {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.291756] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] os_vif_ovs.per_port_bridge = False {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.291923] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] os_brick.lock_path = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.292103] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.292272] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] os_brick.wait_mpath_device_interval = 1 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.292441] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] privsep_osbrick.capabilities = [21] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.292604] 
env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] privsep_osbrick.group = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.292761] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] privsep_osbrick.helper_command = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.292926] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.293103] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] privsep_osbrick.thread_pool_size = 8 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.293266] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] privsep_osbrick.user = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.293440] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.293626] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] nova_sys_admin.group = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.293791] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] nova_sys_admin.helper_command = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.293961] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.294138] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] nova_sys_admin.thread_pool_size = 8 {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.294300] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] nova_sys_admin.user = None {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 612.294432] env[69784]: DEBUG oslo_service.service [None req-c3d51715-49d5-4e3d-b324-1078995fe35b None None] ******************************************************************************** {{(pid=69784) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 612.294852] env[69784]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 612.304538] env[69784]: WARNING nova.virt.vmwareapi.driver [None req-5ce62824-9841-4e61-bfbe-0d39cf5d31df None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. 
If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 612.304973] env[69784]: INFO nova.virt.node [None req-5ce62824-9841-4e61-bfbe-0d39cf5d31df None None] Generated node identity dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 [ 612.305207] env[69784]: INFO nova.virt.node [None req-5ce62824-9841-4e61-bfbe-0d39cf5d31df None None] Wrote node identity dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 to /opt/stack/data/n-cpu-1/compute_id [ 612.318678] env[69784]: WARNING nova.compute.manager [None req-5ce62824-9841-4e61-bfbe-0d39cf5d31df None None] Compute nodes ['dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 612.352465] env[69784]: INFO nova.compute.manager [None req-5ce62824-9841-4e61-bfbe-0d39cf5d31df None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 612.372938] env[69784]: WARNING nova.compute.manager [None req-5ce62824-9841-4e61-bfbe-0d39cf5d31df None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 612.373189] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5ce62824-9841-4e61-bfbe-0d39cf5d31df None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 612.373422] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5ce62824-9841-4e61-bfbe-0d39cf5d31df None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 612.373599] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5ce62824-9841-4e61-bfbe-0d39cf5d31df None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 612.373905] env[69784]: DEBUG nova.compute.resource_tracker [None req-5ce62824-9841-4e61-bfbe-0d39cf5d31df None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69784) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 612.374909] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-950b00ec-fda6-4357-b58e-240b8caeeeb9 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.383885] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1081f13-cf4d-4ae3-af1b-dfcabb9fd4a5 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.397566] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e0f570a-b46e-4b5a-8b10-d100ada5f3ee {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.403708] env[69784]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-209b68cf-b8c8-466a-bcaf-9e343490e183 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.433172] env[69784]: DEBUG nova.compute.resource_tracker [None req-5ce62824-9841-4e61-bfbe-0d39cf5d31df None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180960MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=69784) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 612.433271] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5ce62824-9841-4e61-bfbe-0d39cf5d31df None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 612.433703] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5ce62824-9841-4e61-bfbe-0d39cf5d31df None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 612.444437] env[69784]: WARNING nova.compute.resource_tracker [None req-5ce62824-9841-4e61-bfbe-0d39cf5d31df None None] No compute node record for cpu-1:dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 could not be found. [ 612.457014] env[69784]: INFO nova.compute.resource_tracker [None req-5ce62824-9841-4e61-bfbe-0d39cf5d31df None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 [ 612.510267] env[69784]: DEBUG nova.compute.resource_tracker [None req-5ce62824-9841-4e61-bfbe-0d39cf5d31df None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 612.510426] env[69784]: DEBUG nova.compute.resource_tracker [None req-5ce62824-9841-4e61-bfbe-0d39cf5d31df None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 612.623615] env[69784]: INFO nova.scheduler.client.report [None req-5ce62824-9841-4e61-bfbe-0d39cf5d31df None None] [req-a9b5327c-8bad-47df-b23d-ba2071a15b21] Created resource provider record via placement API for resource provider with UUID dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
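
The inventory dictionary reported a few entries below for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 (VCPU, MEMORY_MB, DISK_GB) is what Placement uses to decide whether the instance claims later in this log fit on the node. A minimal sketch of that accounting, assuming the usual Placement rule that capacity = (total - reserved) * allocation_ratio and that max_unit caps any single allocation; this is an illustration only, not Nova's or Placement's actual code:

```python
# Illustrative sketch (not Nova/Placement code): how the inventory payload logged
# below translates into schedulable capacity. Assumed rule:
#   capacity = (total - reserved) * allocation_ratio, with max_unit capping one request.

INVENTORY = {
    'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1, 'max_unit': 16,
                  'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530,
                  'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'min_unit': 1, 'max_unit': 94,
                  'step_size': 1, 'allocation_ratio': 1.0},
}

def capacity(inv: dict) -> float:
    """Schedulable amount of one resource class under the assumed rule."""
    return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

def fits(inv: dict, used: float, requested: float) -> bool:
    """Whether a request fits, respecting max_unit and remaining capacity."""
    return requested <= inv['max_unit'] and used + requested <= capacity(inv)

if __name__ == '__main__':
    for rc, inv in INVENTORY.items():
        print(rc, capacity(inv))   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
    # An m1.nano instance (1 vCPU, 128 MB RAM, 1 GB root disk, per the flavor logged
    # later) trivially fits on an empty provider:
    print(all([fits(INVENTORY['VCPU'], 0, 1),
               fits(INVENTORY['MEMORY_MB'], 0, 128),
               fits(INVENTORY['DISK_GB'], 0, 1)]))
```
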
[ 612.639601] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a71d3afe-b462-4fc1-8300-6478fece83d0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.647247] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a6bdc14-cf21-4c12-975a-26567c2f90d6 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.675786] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ed4f908-0195-4912-929f-d4d2d1f3cc1d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.682478] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0034f011-3ace-4366-a42f-2b8ad31f9ed8 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.694978] env[69784]: DEBUG nova.compute.provider_tree [None req-5ce62824-9841-4e61-bfbe-0d39cf5d31df None None] Updating inventory in ProviderTree for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 612.729535] env[69784]: DEBUG nova.scheduler.client.report [None req-5ce62824-9841-4e61-bfbe-0d39cf5d31df None None] Updated inventory for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 612.729751] env[69784]: DEBUG nova.compute.provider_tree [None req-5ce62824-9841-4e61-bfbe-0d39cf5d31df None None] Updating resource provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 generation from 0 to 1 during operation: update_inventory {{(pid=69784) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 612.729891] env[69784]: DEBUG nova.compute.provider_tree [None req-5ce62824-9841-4e61-bfbe-0d39cf5d31df None None] Updating inventory in ProviderTree for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 612.776613] env[69784]: DEBUG nova.compute.provider_tree [None req-5ce62824-9841-4e61-bfbe-0d39cf5d31df None None] Updating resource 
provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 generation from 1 to 2 during operation: update_traits {{(pid=69784) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 612.793919] env[69784]: DEBUG nova.compute.resource_tracker [None req-5ce62824-9841-4e61-bfbe-0d39cf5d31df None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69784) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 612.794131] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5ce62824-9841-4e61-bfbe-0d39cf5d31df None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.361s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 612.794299] env[69784]: DEBUG nova.service [None req-5ce62824-9841-4e61-bfbe-0d39cf5d31df None None] Creating RPC server for service compute {{(pid=69784) start /opt/stack/nova/nova/service.py:182}} [ 612.807164] env[69784]: DEBUG nova.service [None req-5ce62824-9841-4e61-bfbe-0d39cf5d31df None None] Join ServiceGroup membership for this service compute {{(pid=69784) start /opt/stack/nova/nova/service.py:199}} [ 612.807363] env[69784]: DEBUG nova.servicegroup.drivers.db [None req-5ce62824-9841-4e61-bfbe-0d39cf5d31df None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=69784) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 622.130099] env[69784]: DEBUG dbcounter [-] [69784] Writing DB stats nova_cell1:SELECT=1 {{(pid=69784) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 622.132495] env[69784]: DEBUG dbcounter [-] [69784] Writing DB stats nova_cell0:SELECT=1 {{(pid=69784) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 655.766123] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Acquiring lock "e2045977-fc76-4694-b233-ee747d1a1837" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 655.766711] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Lock "e2045977-fc76-4694-b233-ee747d1a1837" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 655.787901] env[69784]: DEBUG nova.compute.manager [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Starting instance... 
{{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 655.910444] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 655.910556] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 655.912257] env[69784]: INFO nova.compute.claims [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 656.062396] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-059326df-75c3-4938-9792-cdb2f6580064 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.072333] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bd2f50f-b09d-4647-bd39-3ff1f996db8b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.113030] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1313fc8-ee7a-425b-adeb-805444fa9d7b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.130458] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6894c210-4fee-45b0-a31e-16aed609c4db {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.158123] env[69784]: DEBUG nova.compute.provider_tree [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 656.176232] env[69784]: DEBUG nova.scheduler.client.report [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 656.215485] env[69784]: DEBUG oslo_concurrency.lockutils 
[None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.302s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 656.215485] env[69784]: DEBUG nova.compute.manager [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Start building networks asynchronously for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 656.286503] env[69784]: DEBUG nova.compute.utils [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 656.288477] env[69784]: DEBUG nova.compute.manager [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 656.288732] env[69784]: DEBUG nova.network.neutron [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 656.307797] env[69784]: DEBUG nova.compute.manager [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 656.419797] env[69784]: DEBUG nova.compute.manager [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Start spawning the instance on the hypervisor. 
{{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 657.012988] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Acquiring lock "54d12624-7c76-433d-8f1f-3f9bb451e451" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 657.014151] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Lock "54d12624-7c76-433d-8f1f-3f9bb451e451" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 657.035856] env[69784]: DEBUG nova.compute.manager [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 657.105156] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 657.105156] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 657.106827] env[69784]: INFO nova.compute.claims [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 657.259240] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91ed70a5-8bb8-4251-a066-cbda57f0dae2 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.270503] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3db4612d-43ce-4362-b584-6ac49b8a46ae {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.299682] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9477ccaa-3621-48e6-9bfe-09ce238b758d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.307854] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-bb0973f6-585a-40bb-8e16-bb9f21b104cb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.327041] env[69784]: DEBUG nova.compute.provider_tree [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 657.338975] env[69784]: DEBUG nova.scheduler.client.report [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 657.369811] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.262s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 657.369811] env[69784]: DEBUG nova.compute.manager [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Start building networks asynchronously for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 657.430880] env[69784]: DEBUG nova.compute.utils [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 657.431848] env[69784]: DEBUG nova.compute.manager [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Allocating IP information in the background. 
{{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 657.435394] env[69784]: DEBUG nova.network.neutron [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 657.454394] env[69784]: DEBUG nova.compute.manager [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 657.538050] env[69784]: DEBUG nova.compute.manager [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Start spawning the instance on the hypervisor. {{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 657.620725] env[69784]: DEBUG nova.virt.hardware [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 657.620922] env[69784]: DEBUG nova.virt.hardware [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 657.621112] env[69784]: DEBUG nova.virt.hardware [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 657.623039] env[69784]: DEBUG nova.virt.hardware [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 657.623039] env[69784]: DEBUG nova.virt.hardware [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 657.623039] env[69784]: DEBUG nova.virt.hardware [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 657.623039] env[69784]: DEBUG nova.virt.hardware [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 657.623039] env[69784]: DEBUG nova.virt.hardware [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 657.623242] env[69784]: DEBUG nova.virt.hardware [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 657.623242] env[69784]: DEBUG nova.virt.hardware [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 657.623594] env[69784]: DEBUG nova.virt.hardware [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 657.625145] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-526da8e4-0077-4c4b-914f-4885fc24ec7f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.637306] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4018eb8-9039-48d2-96b7-3f3568775ff4 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.657622] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4112449d-fcaf-4b11-85fa-4c4818625533 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.707244] env[69784]: DEBUG nova.policy [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec967aede59e4291a05a1bbe0d9d04b1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2c029233dfe747498ed1acedccf31a4c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 
'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 658.009011] env[69784]: DEBUG nova.virt.hardware [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 658.009011] env[69784]: DEBUG nova.virt.hardware [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 658.009011] env[69784]: DEBUG nova.virt.hardware [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 658.009324] env[69784]: DEBUG nova.virt.hardware [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 658.009324] env[69784]: DEBUG nova.virt.hardware [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 658.009742] env[69784]: DEBUG nova.virt.hardware [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 658.010121] env[69784]: DEBUG nova.virt.hardware [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 658.010394] env[69784]: DEBUG nova.virt.hardware [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd 
tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 658.010664] env[69784]: DEBUG nova.virt.hardware [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 658.010924] env[69784]: DEBUG nova.virt.hardware [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 658.011261] env[69784]: DEBUG nova.virt.hardware [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 658.012337] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fe91249-b4b1-4fd3-add8-cdc1e59a5e34 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.021805] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09ad71cd-9e5a-42b4-974b-dee225a6f649 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.049592] env[69784]: DEBUG nova.policy [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '62783b8779004be9a585c5a27f71966e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a525d26822b747de8c59e091d172dc69', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 658.376257] env[69784]: DEBUG nova.network.neutron [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Successfully created port: e1380432-497a-4ed0-ae9c-a7258fdbc796 {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 659.409095] env[69784]: DEBUG nova.network.neutron [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Successfully created port: 80d62142-68b0-4b90-9a88-f5488dd3e0d1 {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 659.746944] env[69784]: DEBUG oslo_concurrency.lockutils [None req-970c852e-c45b-444d-9cd1-51f208346268 
tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Acquiring lock "150d9f3b-b338-4810-ad32-7c8609131ce4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 659.746944] env[69784]: DEBUG oslo_concurrency.lockutils [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Lock "150d9f3b-b338-4810-ad32-7c8609131ce4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 659.762900] env[69784]: DEBUG nova.compute.manager [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 659.852183] env[69784]: DEBUG oslo_concurrency.lockutils [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 659.852183] env[69784]: DEBUG oslo_concurrency.lockutils [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 659.852183] env[69784]: INFO nova.compute.claims [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 660.037579] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6260f8d1-b831-4e38-8e74-586e6a101546 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.045402] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bad8b45-19af-4c45-be33-a98a31c12b4c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.081173] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0507ab9e-5715-44c6-81b8-5dac791dd8cb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.087726] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe7f1514-0c00-4deb-9435-eaf330a33d53 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.103437] env[69784]: DEBUG nova.compute.provider_tree [None req-970c852e-c45b-444d-9cd1-51f208346268 
tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 660.113355] env[69784]: DEBUG oslo_concurrency.lockutils [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Acquiring lock "cec4f0d1-1b7d-4189-aadf-6f801f52d9bb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 660.113355] env[69784]: DEBUG oslo_concurrency.lockutils [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Lock "cec4f0d1-1b7d-4189-aadf-6f801f52d9bb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 660.120320] env[69784]: DEBUG nova.scheduler.client.report [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 660.143298] env[69784]: DEBUG nova.compute.manager [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 660.149542] env[69784]: DEBUG oslo_concurrency.lockutils [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.299s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 660.150093] env[69784]: DEBUG nova.compute.manager [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Start building networks asynchronously for instance. 
{{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 660.200732] env[69784]: DEBUG nova.compute.utils [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 660.207858] env[69784]: DEBUG nova.compute.manager [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 660.208121] env[69784]: DEBUG nova.network.neutron [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 660.217166] env[69784]: DEBUG nova.compute.manager [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 660.230115] env[69784]: DEBUG oslo_concurrency.lockutils [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 660.230369] env[69784]: DEBUG oslo_concurrency.lockutils [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 660.232105] env[69784]: INFO nova.compute.claims [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 660.320746] env[69784]: DEBUG nova.compute.manager [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Start spawning the instance on the hypervisor. 
{{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 660.360872] env[69784]: DEBUG nova.virt.hardware [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 660.360872] env[69784]: DEBUG nova.virt.hardware [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 660.360872] env[69784]: DEBUG nova.virt.hardware [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 660.361095] env[69784]: DEBUG nova.virt.hardware [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 660.361095] env[69784]: DEBUG nova.virt.hardware [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 660.361095] env[69784]: DEBUG nova.virt.hardware [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 660.361095] env[69784]: DEBUG nova.virt.hardware [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 660.361095] env[69784]: DEBUG nova.virt.hardware [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 660.361274] env[69784]: DEBUG nova.virt.hardware [None 
req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 660.361274] env[69784]: DEBUG nova.virt.hardware [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 660.361467] env[69784]: DEBUG nova.virt.hardware [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 660.364339] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a73281fe-63dd-49a6-a01f-b75d5908b559 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.375187] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afb8dfc6-1e9b-4d03-8a21-f1dcb8f329b4 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.419527] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d65391a8-b945-4a3a-8e85-a0fd8bffd6a5 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.427944] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e226fe25-eb87-44b8-8541-d68603f20920 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.461964] env[69784]: DEBUG nova.policy [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c1ade2e54e74af6b4e6b368588a3c97', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2703b16bb66a41caa48e003d26148af6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 660.464011] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0374d87f-eb8b-411e-a942-6368bf7d096d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.471918] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea95c0b3-9068-45be-96f7-985c995703de {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.487574] env[69784]: DEBUG nova.compute.provider_tree [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] 
Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 660.498127] env[69784]: DEBUG nova.scheduler.client.report [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 660.513564] env[69784]: DEBUG oslo_concurrency.lockutils [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.283s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 660.514326] env[69784]: DEBUG nova.compute.manager [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Start building networks asynchronously for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 660.555801] env[69784]: DEBUG nova.compute.utils [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 660.557497] env[69784]: DEBUG nova.compute.manager [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 660.557497] env[69784]: DEBUG nova.network.neutron [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 660.568981] env[69784]: DEBUG nova.compute.manager [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 660.639066] env[69784]: DEBUG nova.compute.manager [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Start spawning the instance on the hypervisor. 
{{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 660.663176] env[69784]: DEBUG nova.virt.hardware [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 660.663427] env[69784]: DEBUG nova.virt.hardware [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 660.663587] env[69784]: DEBUG nova.virt.hardware [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 660.663767] env[69784]: DEBUG nova.virt.hardware [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 660.663930] env[69784]: DEBUG nova.virt.hardware [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 660.664171] env[69784]: DEBUG nova.virt.hardware [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 660.664400] env[69784]: DEBUG nova.virt.hardware [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 660.664560] env[69784]: DEBUG nova.virt.hardware [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 660.664727] env[69784]: DEBUG nova.virt.hardware [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 660.664886] env[69784]: DEBUG nova.virt.hardware [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 660.665066] env[69784]: DEBUG nova.virt.hardware [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 660.666192] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a4f6b3a-10f0-464b-bd82-126f9bf30901 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.674610] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4b33fd5-498f-4e0f-be58-7b53f7b0ae08 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.783543] env[69784]: DEBUG nova.policy [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '291f8f93940040e4a112f9e27e0a02ab', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e92cdeb6b85443d9b7035a4523733e13', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 661.140352] env[69784]: DEBUG nova.network.neutron [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Successfully updated port: e1380432-497a-4ed0-ae9c-a7258fdbc796 {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 661.162569] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Acquiring lock "refresh_cache-e2045977-fc76-4694-b233-ee747d1a1837" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 661.162730] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Acquired lock "refresh_cache-e2045977-fc76-4694-b233-ee747d1a1837" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 661.162878] 
env[69784]: DEBUG nova.network.neutron [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 661.315091] env[69784]: DEBUG nova.network.neutron [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Instance cache missing network info. {{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 661.950655] env[69784]: DEBUG nova.network.neutron [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Successfully updated port: 80d62142-68b0-4b90-9a88-f5488dd3e0d1 {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 661.969017] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Acquiring lock "refresh_cache-54d12624-7c76-433d-8f1f-3f9bb451e451" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 661.969017] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Acquired lock "refresh_cache-54d12624-7c76-433d-8f1f-3f9bb451e451" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 661.969017] env[69784]: DEBUG nova.network.neutron [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 662.106612] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Acquiring lock "15e3e0f5-1967-4f7d-b45f-954845c3dc75" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 662.106612] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Lock "15e3e0f5-1967-4f7d-b45f-954845c3dc75" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 662.114422] env[69784]: DEBUG nova.network.neutron [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] 
Instance cache missing network info. {{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 662.127583] env[69784]: DEBUG nova.compute.manager [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 662.231407] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 662.231407] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 662.232939] env[69784]: INFO nova.compute.claims [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 662.276874] env[69784]: DEBUG nova.network.neutron [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Successfully created port: 596c2edd-b1b5-4640-9f4a-3f086d653a2a {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 662.337586] env[69784]: DEBUG nova.network.neutron [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Updating instance_info_cache with network_info: [{"id": "e1380432-497a-4ed0-ae9c-a7258fdbc796", "address": "fa:16:3e:4b:f4:8d", "network": {"id": "1253923e-b2a2-473a-980c-4a2980b07abe", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1695774-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c029233dfe747498ed1acedccf31a4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e9fa4744-8702-4973-b911-ee18192a3e4b", "external-id": "nsx-vlan-transportzone-318", "segmentation_id": 318, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1380432-49", "ovs_interfaceid": "e1380432-497a-4ed0-ae9c-a7258fdbc796", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.354393] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Releasing lock "refresh_cache-e2045977-fc76-4694-b233-ee747d1a1837" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 662.354393] env[69784]: DEBUG nova.compute.manager [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Instance network_info: |[{"id": "e1380432-497a-4ed0-ae9c-a7258fdbc796", "address": "fa:16:3e:4b:f4:8d", "network": {"id": "1253923e-b2a2-473a-980c-4a2980b07abe", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1695774-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c029233dfe747498ed1acedccf31a4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e9fa4744-8702-4973-b911-ee18192a3e4b", "external-id": "nsx-vlan-transportzone-318", "segmentation_id": 318, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1380432-49", "ovs_interfaceid": "e1380432-497a-4ed0-ae9c-a7258fdbc796", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 662.354720] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4b:f4:8d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e9fa4744-8702-4973-b911-ee18192a3e4b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e1380432-497a-4ed0-ae9c-a7258fdbc796', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 662.369458] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Creating folder: OpenStack. Parent ref: group-v4. 
{{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 662.372217] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3de1056b-ab7e-4e9e-8c5a-39a31f068455 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.388658] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Created folder: OpenStack in parent group-v4. [ 662.389093] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Creating folder: Project (2c029233dfe747498ed1acedccf31a4c). Parent ref: group-v692547. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 662.390352] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2ce65b96-df53-4057-9ef1-3c6fc2ea950d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.407124] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Created folder: Project (2c029233dfe747498ed1acedccf31a4c) in parent group-v692547. [ 662.407124] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Creating folder: Instances. Parent ref: group-v692548. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 662.410034] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0d350f34-892a-4141-a240-6da26bef93c3 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.425387] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Created folder: Instances in parent group-v692548. [ 662.425387] env[69784]: DEBUG oslo.service.loopingcall [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 662.425387] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 662.425387] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-354fa1a3-2671-48a1-ae14-fad802483964 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.442465] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54a18d4a-356b-4418-b82d-d09cc2e42926 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.451137] env[69784]: DEBUG nova.network.neutron [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Successfully created port: 5a2e4b5d-a74e-4b25-bd20-634fb41af4da {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 662.456286] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 662.456286] env[69784]: value = "task-3467004" [ 662.456286] env[69784]: _type = "Task" [ 662.456286] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.471241] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c114ce7-00da-4a9a-97cb-faf07d168479 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.473166] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467004, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.502505] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d6ca4ab-ee1a-44d1-84db-8b425f8278e6 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.512895] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31c359f0-73f1-46f3-8e10-217e49d291b7 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.527751] env[69784]: DEBUG nova.compute.provider_tree [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 662.539232] env[69784]: DEBUG nova.scheduler.client.report [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 662.559293] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.328s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 662.561299] env[69784]: DEBUG nova.compute.manager [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Start building networks asynchronously for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 662.606024] env[69784]: DEBUG nova.compute.utils [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 662.606024] env[69784]: DEBUG nova.compute.manager [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Allocating IP information in the background. 
{{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 662.606692] env[69784]: DEBUG nova.network.neutron [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 662.620957] env[69784]: DEBUG nova.compute.manager [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 662.707890] env[69784]: DEBUG nova.compute.manager [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Start spawning the instance on the hypervisor. {{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 662.757815] env[69784]: DEBUG nova.virt.hardware [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 662.758221] env[69784]: DEBUG nova.virt.hardware [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 662.758473] env[69784]: DEBUG nova.virt.hardware [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 662.759252] env[69784]: DEBUG nova.virt.hardware [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 662.759499] env[69784]: DEBUG nova.virt.hardware [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Image pref 0:0:0 {{(pid=69784) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 662.759889] env[69784]: DEBUG nova.virt.hardware [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 662.760125] env[69784]: DEBUG nova.virt.hardware [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 662.761053] env[69784]: DEBUG nova.virt.hardware [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 662.761053] env[69784]: DEBUG nova.virt.hardware [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 662.761053] env[69784]: DEBUG nova.virt.hardware [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 662.761053] env[69784]: DEBUG nova.virt.hardware [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 662.765693] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e9e2886-2592-46a1-b8b4-3f84555ffb9c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.775511] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d686f1cc-60df-4fac-8d33-9d446b5ba2e5 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.969063] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467004, 'name': CreateVM_Task, 'duration_secs': 0.315044} completed successfully. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.969327] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 663.002278] env[69784]: DEBUG oslo_vmware.service [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1460f78-e128-4253-b73c-221ab9a67651 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.010686] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 663.010686] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 663.011362] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 663.011622] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb93fae4-3d72-4d4a-b447-5fd8b077ef60 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.017991] env[69784]: DEBUG oslo_vmware.api [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Waiting for the task: (returnval){ [ 663.017991] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52dc13ab-a054-1021-ebd2-50d6cab846e4" [ 663.017991] env[69784]: _type = "Task" [ 663.017991] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.028366] env[69784]: DEBUG oslo_vmware.api [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52dc13ab-a054-1021-ebd2-50d6cab846e4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.044573] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquiring lock "28e32097-d536-442f-bcb4-f93b64cb64e4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 663.044791] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "28e32097-d536-442f-bcb4-f93b64cb64e4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 663.063777] env[69784]: DEBUG nova.compute.manager [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 663.075086] env[69784]: DEBUG oslo_concurrency.lockutils [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Acquiring lock "a927e0ff-6c39-47fd-a082-88d41eb54015" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 663.075316] env[69784]: DEBUG oslo_concurrency.lockutils [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Lock "a927e0ff-6c39-47fd-a082-88d41eb54015" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 663.090975] env[69784]: DEBUG nova.compute.manager [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Starting instance... 
{{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 663.136183] env[69784]: DEBUG nova.policy [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f8f6f6a08f60452082808f45d2508053', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '45a6b45b635c4110820784a8b63df746', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 663.138334] env[69784]: DEBUG nova.network.neutron [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Updating instance_info_cache with network_info: [{"id": "80d62142-68b0-4b90-9a88-f5488dd3e0d1", "address": "fa:16:3e:f8:9b:6f", "network": {"id": "29465308-b300-4555-b996-9e6d38f9aa34", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1579267705-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a525d26822b747de8c59e091d172dc69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33fdc099-7497-41c1-b40c-1558937132d4", "external-id": "nsx-vlan-transportzone-764", "segmentation_id": 764, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80d62142-68", "ovs_interfaceid": "80d62142-68b0-4b90-9a88-f5488dd3e0d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.154571] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 663.154875] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 663.156390] env[69784]: INFO nova.compute.claims [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 663.159692] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Releasing lock "refresh_cache-54d12624-7c76-433d-8f1f-3f9bb451e451" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 663.159948] env[69784]: DEBUG nova.compute.manager [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Instance network_info: |[{"id": "80d62142-68b0-4b90-9a88-f5488dd3e0d1", "address": "fa:16:3e:f8:9b:6f", "network": {"id": "29465308-b300-4555-b996-9e6d38f9aa34", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1579267705-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a525d26822b747de8c59e091d172dc69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33fdc099-7497-41c1-b40c-1558937132d4", "external-id": "nsx-vlan-transportzone-764", "segmentation_id": 764, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80d62142-68", "ovs_interfaceid": "80d62142-68b0-4b90-9a88-f5488dd3e0d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 663.160609] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:9b:6f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '33fdc099-7497-41c1-b40c-1558937132d4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '80d62142-68b0-4b90-9a88-f5488dd3e0d1', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 663.168091] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Creating folder: Project (a525d26822b747de8c59e091d172dc69). Parent ref: group-v692547. 
{{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 663.168657] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-911716a0-dae1-43b2-a9ec-75850b761b1e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.181602] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Created folder: Project (a525d26822b747de8c59e091d172dc69) in parent group-v692547. [ 663.182147] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Creating folder: Instances. Parent ref: group-v692551. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 663.184220] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-03d3ea32-02ac-4662-8a84-2611f66223c1 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.194944] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Created folder: Instances in parent group-v692551. [ 663.195207] env[69784]: DEBUG oslo.service.loopingcall [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 663.198534] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 663.198534] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aa032777-e7e9-4b55-8fe3-c69bb7738b67 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.214137] env[69784]: DEBUG oslo_concurrency.lockutils [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 663.222947] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 663.222947] env[69784]: value = "task-3467007" [ 663.222947] env[69784]: _type = "Task" [ 663.222947] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.232529] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467007, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.388115] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c42d0a5e-d8b6-41ec-bb6a-b1aa50e4738b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.396378] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dedfe31-27db-4dd2-aaf6-6c5b30838554 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.430472] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-085d4cef-e6c8-4e13-afc7-0856a6569fb5 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.438015] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31b72468-1c62-46f2-a1fd-8e69fd83a772 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.452704] env[69784]: DEBUG nova.compute.provider_tree [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 663.464035] env[69784]: DEBUG nova.scheduler.client.report [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 663.485338] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.330s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 663.485842] env[69784]: DEBUG nova.compute.manager [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Start building networks asynchronously for instance. 
{{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 663.488827] env[69784]: DEBUG oslo_concurrency.lockutils [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.274s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 663.490491] env[69784]: INFO nova.compute.claims [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 663.529763] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 663.530102] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 663.530350] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 663.530496] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 663.531080] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 663.531173] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b2c8363d-fbb5-4767-b38b-d9ead736e9ce {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.537508] env[69784]: DEBUG nova.compute.utils [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 663.540667] env[69784]: DEBUG nova.compute.manager [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 
tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 663.540950] env[69784]: DEBUG nova.network.neutron [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 663.543201] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 663.543395] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 663.547411] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e43cd491-d2cd-43bd-bd44-65fd2ea935ec {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.554879] env[69784]: DEBUG nova.compute.manager [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 663.564178] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a30dd2e3-dbce-41a7-ac94-4faef7e781cb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.570239] env[69784]: DEBUG oslo_vmware.api [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Waiting for the task: (returnval){ [ 663.570239] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]5265840a-80ea-aece-3b77-c8cfb908057c" [ 663.570239] env[69784]: _type = "Task" [ 663.570239] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.580591] env[69784]: DEBUG oslo_vmware.api [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]5265840a-80ea-aece-3b77-c8cfb908057c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.645835] env[69784]: DEBUG nova.compute.manager [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Start spawning the instance on the hypervisor. 
{{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 663.677038] env[69784]: DEBUG nova.virt.hardware [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 663.677038] env[69784]: DEBUG nova.virt.hardware [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 663.677038] env[69784]: DEBUG nova.virt.hardware [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 663.677261] env[69784]: DEBUG nova.virt.hardware [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 663.677261] env[69784]: DEBUG nova.virt.hardware [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 663.677261] env[69784]: DEBUG nova.virt.hardware [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 663.677261] env[69784]: DEBUG nova.virt.hardware [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 663.677261] env[69784]: DEBUG nova.virt.hardware [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 663.677463] env[69784]: DEBUG nova.virt.hardware [None 
req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 663.677463] env[69784]: DEBUG nova.virt.hardware [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 663.677529] env[69784]: DEBUG nova.virt.hardware [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 663.678374] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a433a76-ad33-49a6-8387-80f7d5b73019 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.689700] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d69133c7-f290-4c6f-a37d-2a23a332fa83 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.711074] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba28ed17-8b65-4c3e-a015-54687d032e5e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.718059] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e6b3479-5f31-4ee9-b8a7-0b6b40af45fb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.732368] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467007, 'name': CreateVM_Task, 'duration_secs': 0.355228} completed successfully. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.755210] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 663.756133] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 663.756369] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 663.756598] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 663.757359] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16892989-dece-415e-8170-457b048df79c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.759999] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67d72250-0485-41f8-a7c9-25f29f15fb74 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.765228] env[69784]: DEBUG oslo_vmware.api [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Waiting for the task: (returnval){ [ 663.765228] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]524c84c0-4086-bc6a-da6d-e1c1f73d63c4" [ 663.765228] env[69784]: _type = "Task" [ 663.765228] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.771731] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0b1585b-5c00-4a0f-89d7-8a978e751ce0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.789666] env[69784]: DEBUG nova.compute.provider_tree [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 663.790935] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 663.791652] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 663.791777] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 663.799808] env[69784]: DEBUG nova.scheduler.client.report [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 663.810969] env[69784]: DEBUG nova.policy [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c93b274686c34049be1b37ef70656616', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c0126dda00a44838ac749dee6f266970', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 663.825878] env[69784]: DEBUG 
oslo_concurrency.lockutils [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.338s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 663.826407] env[69784]: DEBUG nova.compute.manager [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Start building networks asynchronously for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 663.870334] env[69784]: DEBUG nova.compute.utils [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 663.873803] env[69784]: DEBUG nova.compute.manager [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 663.873977] env[69784]: DEBUG nova.network.neutron [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 663.890266] env[69784]: DEBUG nova.compute.manager [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 663.976509] env[69784]: DEBUG nova.compute.manager [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Start spawning the instance on the hypervisor. 
{{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 664.007954] env[69784]: DEBUG nova.virt.hardware [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 664.008211] env[69784]: DEBUG nova.virt.hardware [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 664.009027] env[69784]: DEBUG nova.virt.hardware [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 664.009027] env[69784]: DEBUG nova.virt.hardware [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 664.009027] env[69784]: DEBUG nova.virt.hardware [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 664.009027] env[69784]: DEBUG nova.virt.hardware [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 664.009211] env[69784]: DEBUG nova.virt.hardware [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 664.009381] env[69784]: DEBUG nova.virt.hardware [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 664.009582] env[69784]: DEBUG nova.virt.hardware [None req-805bfa54-3b22-4399-bc2c-351df7778003 
tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 664.009775] env[69784]: DEBUG nova.virt.hardware [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 664.009988] env[69784]: DEBUG nova.virt.hardware [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 664.010857] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ace6a48-11c1-4f92-827c-c2c1f95a31d3 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.019804] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8b83ac9-d468-4e5e-a4e5-a067c649d5ab {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.054493] env[69784]: DEBUG nova.compute.manager [req-4adedf04-7621-4f00-b679-dfb1e0160cfa req-513cff6c-0704-4edc-8998-c26210bd57e0 service nova] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Received event network-vif-plugged-e1380432-497a-4ed0-ae9c-a7258fdbc796 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 664.054493] env[69784]: DEBUG oslo_concurrency.lockutils [req-4adedf04-7621-4f00-b679-dfb1e0160cfa req-513cff6c-0704-4edc-8998-c26210bd57e0 service nova] Acquiring lock "e2045977-fc76-4694-b233-ee747d1a1837-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 664.054493] env[69784]: DEBUG oslo_concurrency.lockutils [req-4adedf04-7621-4f00-b679-dfb1e0160cfa req-513cff6c-0704-4edc-8998-c26210bd57e0 service nova] Lock "e2045977-fc76-4694-b233-ee747d1a1837-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 664.054493] env[69784]: DEBUG oslo_concurrency.lockutils [req-4adedf04-7621-4f00-b679-dfb1e0160cfa req-513cff6c-0704-4edc-8998-c26210bd57e0 service nova] Lock "e2045977-fc76-4694-b233-ee747d1a1837-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 664.054785] env[69784]: DEBUG nova.compute.manager [req-4adedf04-7621-4f00-b679-dfb1e0160cfa req-513cff6c-0704-4edc-8998-c26210bd57e0 service nova] [instance: e2045977-fc76-4694-b233-ee747d1a1837] No waiting events found dispatching network-vif-plugged-e1380432-497a-4ed0-ae9c-a7258fdbc796 {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 664.054785] env[69784]: WARNING nova.compute.manager [req-4adedf04-7621-4f00-b679-dfb1e0160cfa req-513cff6c-0704-4edc-8998-c26210bd57e0 service nova] [instance: 
e2045977-fc76-4694-b233-ee747d1a1837] Received unexpected event network-vif-plugged-e1380432-497a-4ed0-ae9c-a7258fdbc796 for instance with vm_state building and task_state spawning. [ 664.084199] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 664.084199] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Creating directory with path [datastore1] vmware_temp/6a530867-f477-4778-878f-ca7fc0fef30a/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 664.084199] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5d5f702e-b4d0-4947-b8d1-c8da38a1bb80 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.106172] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Created directory with path [datastore1] vmware_temp/6a530867-f477-4778-878f-ca7fc0fef30a/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 664.106452] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Fetch image to [datastore1] vmware_temp/6a530867-f477-4778-878f-ca7fc0fef30a/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 664.106689] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/6a530867-f477-4778-878f-ca7fc0fef30a/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 664.107809] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1e97d52-0018-43f6-ae81-034f8e184611 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.119176] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faa6ec47-9ddd-4060-82f8-fbc85ffba0a3 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.133527] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f04d5ca-7e86-4780-bb17-e0730598fae8 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.172809] env[69784]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-228a5bbc-b417-47dc-84d9-bfa5b4fa7e49 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.180111] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-606568c5-8661-4560-8ce3-26c967f76ff8 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.215116] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 664.290102] env[69784]: DEBUG oslo_vmware.rw_handles [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6a530867-f477-4778-878f-ca7fc0fef30a/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 664.352030] env[69784]: DEBUG oslo_vmware.rw_handles [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Completed reading data from the image iterator. {{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 664.352229] env[69784]: DEBUG oslo_vmware.rw_handles [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6a530867-f477-4778-878f-ca7fc0fef30a/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 664.426378] env[69784]: DEBUG nova.policy [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '21a99ca0c0dc46ca9077760f710dbd6a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6cb9a697af9446618ca4f75949110582', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 664.702808] env[69784]: DEBUG nova.network.neutron [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Successfully created port: df8e4607-0bbd-4265-9851-9548c41ae7f8 {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 665.340469] env[69784]: DEBUG nova.network.neutron [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Successfully created port: fd23e706-e14f-43a3-836a-1c2be723e052 {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 665.492822] env[69784]: DEBUG nova.network.neutron [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Successfully updated port: 596c2edd-b1b5-4640-9f4a-3f086d653a2a {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 665.503550] env[69784]: DEBUG oslo_concurrency.lockutils [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Acquiring lock "refresh_cache-150d9f3b-b338-4810-ad32-7c8609131ce4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 665.503825] env[69784]: DEBUG oslo_concurrency.lockutils [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Acquired lock "refresh_cache-150d9f3b-b338-4810-ad32-7c8609131ce4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 665.504066] env[69784]: DEBUG nova.network.neutron [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 665.651099] env[69784]: DEBUG nova.network.neutron [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Instance cache missing network info. 
{{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 665.796088] env[69784]: DEBUG nova.network.neutron [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Successfully updated port: 5a2e4b5d-a74e-4b25-bd20-634fb41af4da {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 665.809753] env[69784]: DEBUG oslo_concurrency.lockutils [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Acquiring lock "refresh_cache-cec4f0d1-1b7d-4189-aadf-6f801f52d9bb" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 665.809753] env[69784]: DEBUG oslo_concurrency.lockutils [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Acquired lock "refresh_cache-cec4f0d1-1b7d-4189-aadf-6f801f52d9bb" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 665.810147] env[69784]: DEBUG nova.network.neutron [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 665.995973] env[69784]: DEBUG nova.network.neutron [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Instance cache missing network info. 
{{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 666.443245] env[69784]: DEBUG nova.network.neutron [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Successfully created port: 1ff7f733-8031-49d3-9187-1cd25caea187 {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 666.541449] env[69784]: DEBUG nova.network.neutron [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Updating instance_info_cache with network_info: [{"id": "5a2e4b5d-a74e-4b25-bd20-634fb41af4da", "address": "fa:16:3e:31:07:41", "network": {"id": "e0a27684-7bce-47e5-aa25-b3e5b50d3019", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7b0f6ea1d8724018ae13e62fe7220317", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a2e4b5d-a7", "ovs_interfaceid": "5a2e4b5d-a74e-4b25-bd20-634fb41af4da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 666.555696] env[69784]: DEBUG oslo_concurrency.lockutils [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Releasing lock "refresh_cache-cec4f0d1-1b7d-4189-aadf-6f801f52d9bb" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 666.555696] env[69784]: DEBUG nova.compute.manager [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Instance network_info: |[{"id": "5a2e4b5d-a74e-4b25-bd20-634fb41af4da", "address": "fa:16:3e:31:07:41", "network": {"id": "e0a27684-7bce-47e5-aa25-b3e5b50d3019", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7b0f6ea1d8724018ae13e62fe7220317", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a2e4b5d-a7", "ovs_interfaceid": "5a2e4b5d-a74e-4b25-bd20-634fb41af4da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 666.556128] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:31:07:41', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '685b4083-b748-41fb-a68a-273b1073fa28', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5a2e4b5d-a74e-4b25-bd20-634fb41af4da', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 666.566877] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Creating folder: Project (e92cdeb6b85443d9b7035a4523733e13). Parent ref: group-v692547. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 666.566877] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b86bdb85-798b-4c46-9213-4a035824f27a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.580658] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Created folder: Project (e92cdeb6b85443d9b7035a4523733e13) in parent group-v692547. [ 666.580658] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Creating folder: Instances. Parent ref: group-v692554. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 666.580658] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d6b529f7-b532-43eb-8d32-c12ac0570352 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.589312] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Created folder: Instances in parent group-v692554. [ 666.589960] env[69784]: DEBUG oslo.service.loopingcall [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 666.590892] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 666.590892] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e2b9a93f-9752-462a-997d-01e53eb6084f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.618376] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 666.618376] env[69784]: value = "task-3467010" [ 666.618376] env[69784]: _type = "Task" [ 666.618376] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.632901] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467010, 'name': CreateVM_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.835589] env[69784]: DEBUG nova.network.neutron [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Updating instance_info_cache with network_info: [{"id": "596c2edd-b1b5-4640-9f4a-3f086d653a2a", "address": "fa:16:3e:6a:8a:40", "network": {"id": "e0a27684-7bce-47e5-aa25-b3e5b50d3019", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.60", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7b0f6ea1d8724018ae13e62fe7220317", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap596c2edd-b1", "ovs_interfaceid": "596c2edd-b1b5-4640-9f4a-3f086d653a2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 666.853008] env[69784]: DEBUG oslo_concurrency.lockutils [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Releasing lock "refresh_cache-150d9f3b-b338-4810-ad32-7c8609131ce4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 666.853008] env[69784]: DEBUG nova.compute.manager [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Instance network_info: |[{"id": "596c2edd-b1b5-4640-9f4a-3f086d653a2a", "address": "fa:16:3e:6a:8a:40", "network": {"id": "e0a27684-7bce-47e5-aa25-b3e5b50d3019", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": 
"192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.60", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7b0f6ea1d8724018ae13e62fe7220317", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap596c2edd-b1", "ovs_interfaceid": "596c2edd-b1b5-4640-9f4a-3f086d653a2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 666.853202] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6a:8a:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '685b4083-b748-41fb-a68a-273b1073fa28', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '596c2edd-b1b5-4640-9f4a-3f086d653a2a', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 666.863469] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Creating folder: Project (2703b16bb66a41caa48e003d26148af6). Parent ref: group-v692547. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 666.865941] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e293795f-a334-4a88-9cd0-e7169054b317 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.878690] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Created folder: Project (2703b16bb66a41caa48e003d26148af6) in parent group-v692547. [ 666.878866] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Creating folder: Instances. Parent ref: group-v692557. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 666.879121] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e5be5d43-4257-4883-bced-1e8c7ffafced {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.889486] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Created folder: Instances in parent group-v692557. 
[ 666.889739] env[69784]: DEBUG oslo.service.loopingcall [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 666.889962] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 666.890195] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-92c0e735-a9f6-4c7f-88ef-56bde15c2a09 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.918908] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 666.918908] env[69784]: value = "task-3467013" [ 666.918908] env[69784]: _type = "Task" [ 666.918908] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.932461] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467013, 'name': CreateVM_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.004767] env[69784]: DEBUG nova.network.neutron [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Successfully updated port: df8e4607-0bbd-4265-9851-9548c41ae7f8 {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 667.025348] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Acquiring lock "refresh_cache-15e3e0f5-1967-4f7d-b45f-954845c3dc75" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 667.025619] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Acquired lock "refresh_cache-15e3e0f5-1967-4f7d-b45f-954845c3dc75" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 667.026474] env[69784]: DEBUG nova.network.neutron [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 667.138553] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467010, 'name': CreateVM_Task, 'duration_secs': 0.341674} completed successfully. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.141241] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 667.143053] env[69784]: DEBUG nova.compute.manager [req-0a3e03e0-f332-4aaf-a017-531c240d5032 req-502f7bb5-4d9d-4ee6-ba42-8e43afd3a232 service nova] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Received event network-vif-plugged-596c2edd-b1b5-4640-9f4a-3f086d653a2a {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 667.143111] env[69784]: DEBUG oslo_concurrency.lockutils [req-0a3e03e0-f332-4aaf-a017-531c240d5032 req-502f7bb5-4d9d-4ee6-ba42-8e43afd3a232 service nova] Acquiring lock "150d9f3b-b338-4810-ad32-7c8609131ce4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 667.143526] env[69784]: DEBUG oslo_concurrency.lockutils [req-0a3e03e0-f332-4aaf-a017-531c240d5032 req-502f7bb5-4d9d-4ee6-ba42-8e43afd3a232 service nova] Lock "150d9f3b-b338-4810-ad32-7c8609131ce4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 667.143526] env[69784]: DEBUG oslo_concurrency.lockutils [req-0a3e03e0-f332-4aaf-a017-531c240d5032 req-502f7bb5-4d9d-4ee6-ba42-8e43afd3a232 service nova] Lock "150d9f3b-b338-4810-ad32-7c8609131ce4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 667.143712] env[69784]: DEBUG nova.compute.manager [req-0a3e03e0-f332-4aaf-a017-531c240d5032 req-502f7bb5-4d9d-4ee6-ba42-8e43afd3a232 service nova] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] No waiting events found dispatching network-vif-plugged-596c2edd-b1b5-4640-9f4a-3f086d653a2a {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 667.143780] env[69784]: WARNING nova.compute.manager [req-0a3e03e0-f332-4aaf-a017-531c240d5032 req-502f7bb5-4d9d-4ee6-ba42-8e43afd3a232 service nova] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Received unexpected event network-vif-plugged-596c2edd-b1b5-4640-9f4a-3f086d653a2a for instance with vm_state building and task_state spawning. 
[ 667.144565] env[69784]: DEBUG oslo_concurrency.lockutils [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 667.144753] env[69784]: DEBUG oslo_concurrency.lockutils [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 667.146838] env[69784]: DEBUG oslo_concurrency.lockutils [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 667.146838] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93a50bf5-0ae1-4897-ae20-003233214561 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.155068] env[69784]: DEBUG oslo_vmware.api [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Waiting for the task: (returnval){ [ 667.155068] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]522e4922-5071-6e54-d2c7-dcf1e60acc59" [ 667.155068] env[69784]: _type = "Task" [ 667.155068] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.174833] env[69784]: DEBUG oslo_concurrency.lockutils [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 667.174833] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 667.174833] env[69784]: DEBUG oslo_concurrency.lockutils [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 667.332370] env[69784]: DEBUG nova.network.neutron [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Instance cache missing network info. {{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 667.432158] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467013, 'name': CreateVM_Task, 'duration_secs': 0.299376} completed successfully. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.432158] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 667.433257] env[69784]: DEBUG oslo_concurrency.lockutils [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 667.433420] env[69784]: DEBUG oslo_concurrency.lockutils [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 667.433720] env[69784]: DEBUG oslo_concurrency.lockutils [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 667.434263] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ef330db-6d61-4f15-b59e-a659d3b57ed8 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.440343] env[69784]: DEBUG oslo_vmware.api [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Waiting for the task: (returnval){ [ 667.440343] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52bf4cb5-bfe2-884c-f053-3af32b6d358d" [ 667.440343] env[69784]: _type = "Task" [ 667.440343] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.452040] env[69784]: DEBUG oslo_vmware.api [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52bf4cb5-bfe2-884c-f053-3af32b6d358d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.597389] env[69784]: DEBUG nova.network.neutron [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Successfully updated port: fd23e706-e14f-43a3-836a-1c2be723e052 {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 667.613322] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquiring lock "refresh_cache-28e32097-d536-442f-bcb4-f93b64cb64e4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 667.613587] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquired lock "refresh_cache-28e32097-d536-442f-bcb4-f93b64cb64e4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 667.613715] env[69784]: DEBUG nova.network.neutron [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 667.734290] env[69784]: DEBUG nova.network.neutron [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Instance cache missing network info. 
{{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 667.944807] env[69784]: DEBUG nova.network.neutron [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Updating instance_info_cache with network_info: [{"id": "df8e4607-0bbd-4265-9851-9548c41ae7f8", "address": "fa:16:3e:f9:be:d3", "network": {"id": "7e892938-096d-43a3-bc8b-f11cf33ca144", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1877084576-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45a6b45b635c4110820784a8b63df746", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3f695b6-65bc-45cc-a61d-3c38a14e5c0c", "external-id": "nsx-vlan-transportzone-559", "segmentation_id": 559, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf8e4607-0b", "ovs_interfaceid": "df8e4607-0bbd-4265-9851-9548c41ae7f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.961359] env[69784]: DEBUG oslo_concurrency.lockutils [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 667.961845] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 667.962195] env[69784]: DEBUG oslo_concurrency.lockutils [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 667.971018] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Releasing lock "refresh_cache-15e3e0f5-1967-4f7d-b45f-954845c3dc75" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 667.971018] env[69784]: DEBUG nova.compute.manager [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 
15e3e0f5-1967-4f7d-b45f-954845c3dc75] Instance network_info: |[{"id": "df8e4607-0bbd-4265-9851-9548c41ae7f8", "address": "fa:16:3e:f9:be:d3", "network": {"id": "7e892938-096d-43a3-bc8b-f11cf33ca144", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1877084576-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45a6b45b635c4110820784a8b63df746", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3f695b6-65bc-45cc-a61d-3c38a14e5c0c", "external-id": "nsx-vlan-transportzone-559", "segmentation_id": 559, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf8e4607-0b", "ovs_interfaceid": "df8e4607-0bbd-4265-9851-9548c41ae7f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 667.971381] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f9:be:d3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f3f695b6-65bc-45cc-a61d-3c38a14e5c0c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'df8e4607-0bbd-4265-9851-9548c41ae7f8', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 667.984393] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Creating folder: Project (45a6b45b635c4110820784a8b63df746). Parent ref: group-v692547. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 667.989767] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f98a8c76-1f24-47c2-bd3d-2812a4d3028c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.999364] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Created folder: Project (45a6b45b635c4110820784a8b63df746) in parent group-v692547. [ 667.999562] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Creating folder: Instances. Parent ref: group-v692560. 
{{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 667.999800] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b8072e31-b524-4c62-b264-0a70db09ef85 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.009334] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Created folder: Instances in parent group-v692560. [ 668.010157] env[69784]: DEBUG oslo.service.loopingcall [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 668.010157] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 668.010157] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9dcc03af-4b8f-4b98-8954-d62d070fdceb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.034483] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 668.034483] env[69784]: value = "task-3467016" [ 668.034483] env[69784]: _type = "Task" [ 668.034483] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.045693] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467016, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.284082] env[69784]: DEBUG nova.compute.manager [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Received event network-vif-plugged-80d62142-68b0-4b90-9a88-f5488dd3e0d1 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 668.284950] env[69784]: DEBUG oslo_concurrency.lockutils [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] Acquiring lock "54d12624-7c76-433d-8f1f-3f9bb451e451-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 668.285618] env[69784]: DEBUG oslo_concurrency.lockutils [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] Lock "54d12624-7c76-433d-8f1f-3f9bb451e451-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 668.285989] env[69784]: DEBUG oslo_concurrency.lockutils [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] Lock "54d12624-7c76-433d-8f1f-3f9bb451e451-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 668.286339] env[69784]: DEBUG nova.compute.manager [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] No waiting events found dispatching network-vif-plugged-80d62142-68b0-4b90-9a88-f5488dd3e0d1 {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 668.287874] env[69784]: WARNING nova.compute.manager [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Received unexpected event network-vif-plugged-80d62142-68b0-4b90-9a88-f5488dd3e0d1 for instance with vm_state building and task_state spawning. [ 668.287874] env[69784]: DEBUG nova.compute.manager [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Received event network-changed-e1380432-497a-4ed0-ae9c-a7258fdbc796 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 668.287874] env[69784]: DEBUG nova.compute.manager [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Refreshing instance network info cache due to event network-changed-e1380432-497a-4ed0-ae9c-a7258fdbc796. 
{{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 668.287874] env[69784]: DEBUG oslo_concurrency.lockutils [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] Acquiring lock "refresh_cache-e2045977-fc76-4694-b233-ee747d1a1837" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 668.287874] env[69784]: DEBUG oslo_concurrency.lockutils [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] Acquired lock "refresh_cache-e2045977-fc76-4694-b233-ee747d1a1837" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 668.288113] env[69784]: DEBUG nova.network.neutron [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Refreshing network info cache for port e1380432-497a-4ed0-ae9c-a7258fdbc796 {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 668.436066] env[69784]: DEBUG nova.network.neutron [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Updating instance_info_cache with network_info: [{"id": "fd23e706-e14f-43a3-836a-1c2be723e052", "address": "fa:16:3e:db:4b:70", "network": {"id": "089b8552-21a7-446a-93e8-08dfd5616726", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-692750947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0126dda00a44838ac749dee6f266970", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "75ff81f9-72b2-4e58-a8d8-5699907f7459", "external-id": "nsx-vlan-transportzone-978", "segmentation_id": 978, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd23e706-e1", "ovs_interfaceid": "fd23e706-e14f-43a3-836a-1c2be723e052", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.452573] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Releasing lock "refresh_cache-28e32097-d536-442f-bcb4-f93b64cb64e4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 668.452901] env[69784]: DEBUG nova.compute.manager [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Instance network_info: |[{"id": "fd23e706-e14f-43a3-836a-1c2be723e052", "address": "fa:16:3e:db:4b:70", "network": {"id": "089b8552-21a7-446a-93e8-08dfd5616726", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-692750947-network", "subnets": [{"cidr": 
"192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0126dda00a44838ac749dee6f266970", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "75ff81f9-72b2-4e58-a8d8-5699907f7459", "external-id": "nsx-vlan-transportzone-978", "segmentation_id": 978, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd23e706-e1", "ovs_interfaceid": "fd23e706-e14f-43a3-836a-1c2be723e052", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 668.453291] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:4b:70', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '75ff81f9-72b2-4e58-a8d8-5699907f7459', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fd23e706-e14f-43a3-836a-1c2be723e052', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 668.463322] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Creating folder: Project (c0126dda00a44838ac749dee6f266970). Parent ref: group-v692547. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 668.464636] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f3bdb31f-74c4-461a-b640-c0eb4c85a0f1 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.475754] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Created folder: Project (c0126dda00a44838ac749dee6f266970) in parent group-v692547. [ 668.475754] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Creating folder: Instances. Parent ref: group-v692563. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 668.475754] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2df1d239-e589-4ff7-9fc2-b948e8610f43 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.485356] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Created folder: Instances in parent group-v692563. 
[ 668.485356] env[69784]: DEBUG oslo.service.loopingcall [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 668.485356] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 668.485356] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-afd865ab-0095-46ce-b215-c62a1fca26f5 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.512924] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 668.512924] env[69784]: value = "task-3467019" [ 668.512924] env[69784]: _type = "Task" [ 668.512924] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.521019] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467019, 'name': CreateVM_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.550668] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467016, 'name': CreateVM_Task, 'duration_secs': 0.320377} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.554246] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 668.554246] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 668.554246] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 668.554246] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 668.554246] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42063b88-7ccf-4577-b120-092900d0c53c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.559601] env[69784]: DEBUG oslo_vmware.api [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 
tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Waiting for the task: (returnval){ [ 668.559601] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52084b78-fedf-c731-d476-7afcc0c579eb" [ 668.559601] env[69784]: _type = "Task" [ 668.559601] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.574087] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 668.574633] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 668.575060] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 669.023726] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467019, 'name': CreateVM_Task} progress is 99%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.269408] env[69784]: DEBUG nova.network.neutron [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Updated VIF entry in instance network info cache for port e1380432-497a-4ed0-ae9c-a7258fdbc796. 
{{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 669.269651] env[69784]: DEBUG nova.network.neutron [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Updating instance_info_cache with network_info: [{"id": "e1380432-497a-4ed0-ae9c-a7258fdbc796", "address": "fa:16:3e:4b:f4:8d", "network": {"id": "1253923e-b2a2-473a-980c-4a2980b07abe", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1695774-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c029233dfe747498ed1acedccf31a4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e9fa4744-8702-4973-b911-ee18192a3e4b", "external-id": "nsx-vlan-transportzone-318", "segmentation_id": 318, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1380432-49", "ovs_interfaceid": "e1380432-497a-4ed0-ae9c-a7258fdbc796", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.288830] env[69784]: DEBUG oslo_concurrency.lockutils [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] Releasing lock "refresh_cache-e2045977-fc76-4694-b233-ee747d1a1837" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 669.288830] env[69784]: DEBUG nova.compute.manager [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Received event network-changed-80d62142-68b0-4b90-9a88-f5488dd3e0d1 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 669.288830] env[69784]: DEBUG nova.compute.manager [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Refreshing instance network info cache due to event network-changed-80d62142-68b0-4b90-9a88-f5488dd3e0d1. 
{{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 669.288830] env[69784]: DEBUG oslo_concurrency.lockutils [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] Acquiring lock "refresh_cache-54d12624-7c76-433d-8f1f-3f9bb451e451" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 669.288830] env[69784]: DEBUG oslo_concurrency.lockutils [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] Acquired lock "refresh_cache-54d12624-7c76-433d-8f1f-3f9bb451e451" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 669.289000] env[69784]: DEBUG nova.network.neutron [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Refreshing network info cache for port 80d62142-68b0-4b90-9a88-f5488dd3e0d1 {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 669.344456] env[69784]: DEBUG nova.network.neutron [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Successfully updated port: 1ff7f733-8031-49d3-9187-1cd25caea187 {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 669.362983] env[69784]: DEBUG oslo_concurrency.lockutils [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Acquiring lock "refresh_cache-a927e0ff-6c39-47fd-a082-88d41eb54015" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 669.362983] env[69784]: DEBUG oslo_concurrency.lockutils [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Acquired lock "refresh_cache-a927e0ff-6c39-47fd-a082-88d41eb54015" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 669.363144] env[69784]: DEBUG nova.network.neutron [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 669.479558] env[69784]: DEBUG nova.network.neutron [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Instance cache missing network info. {{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 669.524385] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467019, 'name': CreateVM_Task} progress is 99%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.897689] env[69784]: DEBUG nova.network.neutron [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Updated VIF entry in instance network info cache for port 80d62142-68b0-4b90-9a88-f5488dd3e0d1. 
{{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 669.898273] env[69784]: DEBUG nova.network.neutron [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Updating instance_info_cache with network_info: [{"id": "80d62142-68b0-4b90-9a88-f5488dd3e0d1", "address": "fa:16:3e:f8:9b:6f", "network": {"id": "29465308-b300-4555-b996-9e6d38f9aa34", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1579267705-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a525d26822b747de8c59e091d172dc69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33fdc099-7497-41c1-b40c-1558937132d4", "external-id": "nsx-vlan-transportzone-764", "segmentation_id": 764, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80d62142-68", "ovs_interfaceid": "80d62142-68b0-4b90-9a88-f5488dd3e0d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.916996] env[69784]: DEBUG oslo_concurrency.lockutils [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] Releasing lock "refresh_cache-54d12624-7c76-433d-8f1f-3f9bb451e451" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 669.920852] env[69784]: DEBUG nova.compute.manager [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Received event network-vif-plugged-5a2e4b5d-a74e-4b25-bd20-634fb41af4da {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 669.920852] env[69784]: DEBUG oslo_concurrency.lockutils [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] Acquiring lock "cec4f0d1-1b7d-4189-aadf-6f801f52d9bb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 669.920852] env[69784]: DEBUG oslo_concurrency.lockutils [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] Lock "cec4f0d1-1b7d-4189-aadf-6f801f52d9bb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 669.920852] env[69784]: DEBUG oslo_concurrency.lockutils [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] Lock "cec4f0d1-1b7d-4189-aadf-6f801f52d9bb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 669.921324] env[69784]: DEBUG 
nova.compute.manager [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] No waiting events found dispatching network-vif-plugged-5a2e4b5d-a74e-4b25-bd20-634fb41af4da {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 669.921324] env[69784]: WARNING nova.compute.manager [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Received unexpected event network-vif-plugged-5a2e4b5d-a74e-4b25-bd20-634fb41af4da for instance with vm_state building and task_state spawning. [ 669.921324] env[69784]: DEBUG nova.compute.manager [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Received event network-changed-5a2e4b5d-a74e-4b25-bd20-634fb41af4da {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 669.921324] env[69784]: DEBUG nova.compute.manager [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Refreshing instance network info cache due to event network-changed-5a2e4b5d-a74e-4b25-bd20-634fb41af4da. {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 669.921324] env[69784]: DEBUG oslo_concurrency.lockutils [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] Acquiring lock "refresh_cache-cec4f0d1-1b7d-4189-aadf-6f801f52d9bb" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 669.921502] env[69784]: DEBUG oslo_concurrency.lockutils [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] Acquired lock "refresh_cache-cec4f0d1-1b7d-4189-aadf-6f801f52d9bb" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 669.921502] env[69784]: DEBUG nova.network.neutron [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Refreshing network info cache for port 5a2e4b5d-a74e-4b25-bd20-634fb41af4da {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 670.024102] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467019, 'name': CreateVM_Task, 'duration_secs': 1.379284} completed successfully. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.025018] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 670.026270] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 670.026608] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 670.027039] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 670.027405] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e64f75a-a1ca-4315-a468-37691aff66f4 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.036016] env[69784]: DEBUG oslo_vmware.api [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Waiting for the task: (returnval){ [ 670.036016] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52d68a32-897b-83ed-dbd9-fc7de808daca" [ 670.036016] env[69784]: _type = "Task" [ 670.036016] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.043920] env[69784]: DEBUG oslo_vmware.api [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52d68a32-897b-83ed-dbd9-fc7de808daca, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.414118] env[69784]: DEBUG nova.network.neutron [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Updating instance_info_cache with network_info: [{"id": "1ff7f733-8031-49d3-9187-1cd25caea187", "address": "fa:16:3e:e4:d7:0e", "network": {"id": "e0a27684-7bce-47e5-aa25-b3e5b50d3019", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.106", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7b0f6ea1d8724018ae13e62fe7220317", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ff7f733-80", "ovs_interfaceid": "1ff7f733-8031-49d3-9187-1cd25caea187", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.431327] env[69784]: DEBUG oslo_concurrency.lockutils [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Releasing lock "refresh_cache-a927e0ff-6c39-47fd-a082-88d41eb54015" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 670.431857] env[69784]: DEBUG nova.compute.manager [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Instance network_info: |[{"id": "1ff7f733-8031-49d3-9187-1cd25caea187", "address": "fa:16:3e:e4:d7:0e", "network": {"id": "e0a27684-7bce-47e5-aa25-b3e5b50d3019", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.106", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7b0f6ea1d8724018ae13e62fe7220317", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ff7f733-80", "ovs_interfaceid": "1ff7f733-8031-49d3-9187-1cd25caea187", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 670.432936] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None 
req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:d7:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '685b4083-b748-41fb-a68a-273b1073fa28', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1ff7f733-8031-49d3-9187-1cd25caea187', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 670.444143] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Creating folder: Project (6cb9a697af9446618ca4f75949110582). Parent ref: group-v692547. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 670.444840] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-69f4ee24-1e4a-43a1-a4e8-be611883e4f8 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.456474] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Created folder: Project (6cb9a697af9446618ca4f75949110582) in parent group-v692547. [ 670.456474] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Creating folder: Instances. Parent ref: group-v692566. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 670.456474] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-62c5db8a-f98d-4b86-930d-e93eef119961 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.466056] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Created folder: Instances in parent group-v692566. [ 670.466939] env[69784]: DEBUG oslo.service.loopingcall [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 670.466939] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 670.466939] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-75f4d81a-af68-4bdd-8b74-3f987d2ffa57 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.492963] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 670.492963] env[69784]: value = "task-3467022" [ 670.492963] env[69784]: _type = "Task" [ 670.492963] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.502158] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467022, 'name': CreateVM_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.553772] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 670.554152] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 670.554462] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 671.002352] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467022, 'name': CreateVM_Task, 'duration_secs': 0.284385} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.002513] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 671.003371] env[69784]: DEBUG oslo_concurrency.lockutils [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 671.003533] env[69784]: DEBUG oslo_concurrency.lockutils [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 671.003840] env[69784]: DEBUG oslo_concurrency.lockutils [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 671.004108] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1344f4c4-408c-479a-9d96-32cbdc0483e8 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.008729] env[69784]: DEBUG oslo_vmware.api 
[None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Waiting for the task: (returnval){ [ 671.008729] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52924f5a-c8e7-e6c2-ba21-5fcb9c154441" [ 671.008729] env[69784]: _type = "Task" [ 671.008729] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.017759] env[69784]: DEBUG oslo_vmware.api [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52924f5a-c8e7-e6c2-ba21-5fcb9c154441, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.031116] env[69784]: DEBUG nova.compute.manager [req-86d644f6-2f2d-4644-ac03-a51daac37fd6 req-161fc2b2-dc98-46d0-a312-3715eb8f8bc6 service nova] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Received event network-changed-596c2edd-b1b5-4640-9f4a-3f086d653a2a {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 671.031312] env[69784]: DEBUG nova.compute.manager [req-86d644f6-2f2d-4644-ac03-a51daac37fd6 req-161fc2b2-dc98-46d0-a312-3715eb8f8bc6 service nova] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Refreshing instance network info cache due to event network-changed-596c2edd-b1b5-4640-9f4a-3f086d653a2a. {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 671.031523] env[69784]: DEBUG oslo_concurrency.lockutils [req-86d644f6-2f2d-4644-ac03-a51daac37fd6 req-161fc2b2-dc98-46d0-a312-3715eb8f8bc6 service nova] Acquiring lock "refresh_cache-150d9f3b-b338-4810-ad32-7c8609131ce4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 671.031730] env[69784]: DEBUG oslo_concurrency.lockutils [req-86d644f6-2f2d-4644-ac03-a51daac37fd6 req-161fc2b2-dc98-46d0-a312-3715eb8f8bc6 service nova] Acquired lock "refresh_cache-150d9f3b-b338-4810-ad32-7c8609131ce4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 671.031907] env[69784]: DEBUG nova.network.neutron [req-86d644f6-2f2d-4644-ac03-a51daac37fd6 req-161fc2b2-dc98-46d0-a312-3715eb8f8bc6 service nova] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Refreshing network info cache for port 596c2edd-b1b5-4640-9f4a-3f086d653a2a {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 671.038273] env[69784]: DEBUG nova.network.neutron [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Updated VIF entry in instance network info cache for port 5a2e4b5d-a74e-4b25-bd20-634fb41af4da. 
{{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 671.038586] env[69784]: DEBUG nova.network.neutron [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Updating instance_info_cache with network_info: [{"id": "5a2e4b5d-a74e-4b25-bd20-634fb41af4da", "address": "fa:16:3e:31:07:41", "network": {"id": "e0a27684-7bce-47e5-aa25-b3e5b50d3019", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7b0f6ea1d8724018ae13e62fe7220317", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a2e4b5d-a7", "ovs_interfaceid": "5a2e4b5d-a74e-4b25-bd20-634fb41af4da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.052480] env[69784]: DEBUG oslo_concurrency.lockutils [req-574b01ac-98c5-473c-92d6-d1b1ad8c782b req-a7b2573c-0d5d-48de-8f37-28306d8be7a5 service nova] Releasing lock "refresh_cache-cec4f0d1-1b7d-4189-aadf-6f801f52d9bb" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 671.524924] env[69784]: DEBUG oslo_concurrency.lockutils [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 671.525756] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 671.525992] env[69784]: DEBUG oslo_concurrency.lockutils [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 671.923465] env[69784]: DEBUG nova.network.neutron [req-86d644f6-2f2d-4644-ac03-a51daac37fd6 req-161fc2b2-dc98-46d0-a312-3715eb8f8bc6 service nova] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Updated VIF entry in instance network info cache for port 596c2edd-b1b5-4640-9f4a-3f086d653a2a. 
{{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 671.923857] env[69784]: DEBUG nova.network.neutron [req-86d644f6-2f2d-4644-ac03-a51daac37fd6 req-161fc2b2-dc98-46d0-a312-3715eb8f8bc6 service nova] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Updating instance_info_cache with network_info: [{"id": "596c2edd-b1b5-4640-9f4a-3f086d653a2a", "address": "fa:16:3e:6a:8a:40", "network": {"id": "e0a27684-7bce-47e5-aa25-b3e5b50d3019", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.60", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7b0f6ea1d8724018ae13e62fe7220317", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap596c2edd-b1", "ovs_interfaceid": "596c2edd-b1b5-4640-9f4a-3f086d653a2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.939508] env[69784]: DEBUG oslo_concurrency.lockutils [req-86d644f6-2f2d-4644-ac03-a51daac37fd6 req-161fc2b2-dc98-46d0-a312-3715eb8f8bc6 service nova] Releasing lock "refresh_cache-150d9f3b-b338-4810-ad32-7c8609131ce4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 671.939795] env[69784]: DEBUG nova.compute.manager [req-86d644f6-2f2d-4644-ac03-a51daac37fd6 req-161fc2b2-dc98-46d0-a312-3715eb8f8bc6 service nova] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Received event network-vif-plugged-1ff7f733-8031-49d3-9187-1cd25caea187 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 671.940158] env[69784]: DEBUG oslo_concurrency.lockutils [req-86d644f6-2f2d-4644-ac03-a51daac37fd6 req-161fc2b2-dc98-46d0-a312-3715eb8f8bc6 service nova] Acquiring lock "a927e0ff-6c39-47fd-a082-88d41eb54015-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 671.940255] env[69784]: DEBUG oslo_concurrency.lockutils [req-86d644f6-2f2d-4644-ac03-a51daac37fd6 req-161fc2b2-dc98-46d0-a312-3715eb8f8bc6 service nova] Lock "a927e0ff-6c39-47fd-a082-88d41eb54015-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 671.940392] env[69784]: DEBUG oslo_concurrency.lockutils [req-86d644f6-2f2d-4644-ac03-a51daac37fd6 req-161fc2b2-dc98-46d0-a312-3715eb8f8bc6 service nova] Lock "a927e0ff-6c39-47fd-a082-88d41eb54015-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 671.940555] env[69784]: DEBUG nova.compute.manager 
[req-86d644f6-2f2d-4644-ac03-a51daac37fd6 req-161fc2b2-dc98-46d0-a312-3715eb8f8bc6 service nova] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] No waiting events found dispatching network-vif-plugged-1ff7f733-8031-49d3-9187-1cd25caea187 {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 671.940721] env[69784]: WARNING nova.compute.manager [req-86d644f6-2f2d-4644-ac03-a51daac37fd6 req-161fc2b2-dc98-46d0-a312-3715eb8f8bc6 service nova] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Received unexpected event network-vif-plugged-1ff7f733-8031-49d3-9187-1cd25caea187 for instance with vm_state building and task_state spawning. [ 672.808486] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 672.808976] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 672.809335] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Starting heal instance info cache {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 672.809919] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Rebuilding the list of instances to heal {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 672.838846] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 672.839867] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 672.839867] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 672.839867] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 672.839867] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 672.840475] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Skipping network cache update for instance because it is Building. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 672.840675] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 672.840838] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Didn't find any instances for network info cache update. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 672.841759] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 672.842793] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 672.843304] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 672.843532] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 672.844895] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 672.844895] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._sync_power_states {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 672.878604] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Getting list of instances from cluster (obj){ [ 672.878604] env[69784]: value = "domain-c8" [ 672.878604] env[69784]: _type = "ClusterComputeResource" [ 672.878604] env[69784]: } {{(pid=69784) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 672.880785] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46325d91-7c52-4499-8974-3fb9b13f3961 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.897993] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Got total of 7 instances {{(pid=69784) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 672.898208] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Triggering sync for uuid e2045977-fc76-4694-b233-ee747d1a1837 {{(pid=69784) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} 
[ 672.898446] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Triggering sync for uuid 54d12624-7c76-433d-8f1f-3f9bb451e451 {{(pid=69784) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 672.898629] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Triggering sync for uuid 150d9f3b-b338-4810-ad32-7c8609131ce4 {{(pid=69784) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 672.898783] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Triggering sync for uuid cec4f0d1-1b7d-4189-aadf-6f801f52d9bb {{(pid=69784) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 672.898932] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Triggering sync for uuid 15e3e0f5-1967-4f7d-b45f-954845c3dc75 {{(pid=69784) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 672.899126] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Triggering sync for uuid 28e32097-d536-442f-bcb4-f93b64cb64e4 {{(pid=69784) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 672.899570] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Triggering sync for uuid a927e0ff-6c39-47fd-a082-88d41eb54015 {{(pid=69784) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 672.899956] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "e2045977-fc76-4694-b233-ee747d1a1837" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 672.900220] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "54d12624-7c76-433d-8f1f-3f9bb451e451" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 672.900439] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "150d9f3b-b338-4810-ad32-7c8609131ce4" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 672.900634] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "cec4f0d1-1b7d-4189-aadf-6f801f52d9bb" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 672.900825] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "15e3e0f5-1967-4f7d-b45f-954845c3dc75" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 672.901307] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock 
"28e32097-d536-442f-bcb4-f93b64cb64e4" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 672.901602] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "a927e0ff-6c39-47fd-a082-88d41eb54015" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 672.901800] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 672.902041] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69784) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 672.902210] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 672.916820] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 672.916968] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 672.917222] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 672.917222] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69784) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 672.920068] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86ae1d11-aa88-4d3b-b4ae-cd29e7271920 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.929289] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf2adb19-7d5c-411c-a618-a585c87659cc {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.946034] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43af72a0-50d6-4b86-bc9c-bc7c1236d20f {{(pid=69784) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.954464] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec698d17-fc4b-49f3-adfb-367faca7f3fc {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.989447] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180977MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=69784) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 672.989795] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 672.990036] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 673.053119] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Acquiring lock "c16f7920-23d4-4d77-b70f-118887cc9ff7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 673.053119] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Lock "c16f7920-23d4-4d77-b70f-118887cc9ff7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 673.065982] env[69784]: DEBUG nova.compute.manager [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 673.116581] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance e2045977-fc76-4694-b233-ee747d1a1837 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 673.116581] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 54d12624-7c76-433d-8f1f-3f9bb451e451 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 673.116581] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 150d9f3b-b338-4810-ad32-7c8609131ce4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 673.116581] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance cec4f0d1-1b7d-4189-aadf-6f801f52d9bb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 673.120319] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 15e3e0f5-1967-4f7d-b45f-954845c3dc75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 673.120319] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 28e32097-d536-442f-bcb4-f93b64cb64e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 673.120319] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance a927e0ff-6c39-47fd-a082-88d41eb54015 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 673.150657] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 673.157905] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c16f7920-23d4-4d77-b70f-118887cc9ff7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 673.157905] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 673.158316] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 673.211688] env[69784]: DEBUG nova.compute.manager [req-9b6db25b-dec8-41a5-81f6-6a76c564fd7c req-19250ba1-3146-48ab-ac37-1b3b48f9b7c3 service nova] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Received event network-vif-plugged-df8e4607-0bbd-4265-9851-9548c41ae7f8 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 673.211950] env[69784]: DEBUG oslo_concurrency.lockutils [req-9b6db25b-dec8-41a5-81f6-6a76c564fd7c req-19250ba1-3146-48ab-ac37-1b3b48f9b7c3 service nova] Acquiring lock "15e3e0f5-1967-4f7d-b45f-954845c3dc75-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 673.213606] env[69784]: DEBUG oslo_concurrency.lockutils [req-9b6db25b-dec8-41a5-81f6-6a76c564fd7c req-19250ba1-3146-48ab-ac37-1b3b48f9b7c3 service nova] Lock "15e3e0f5-1967-4f7d-b45f-954845c3dc75-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 673.213606] env[69784]: DEBUG oslo_concurrency.lockutils [req-9b6db25b-dec8-41a5-81f6-6a76c564fd7c req-19250ba1-3146-48ab-ac37-1b3b48f9b7c3 service nova] Lock "15e3e0f5-1967-4f7d-b45f-954845c3dc75-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 673.213606] env[69784]: DEBUG nova.compute.manager [req-9b6db25b-dec8-41a5-81f6-6a76c564fd7c req-19250ba1-3146-48ab-ac37-1b3b48f9b7c3 service nova] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] No waiting events found dispatching network-vif-plugged-df8e4607-0bbd-4265-9851-9548c41ae7f8 {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 673.213606] env[69784]: WARNING nova.compute.manager [req-9b6db25b-dec8-41a5-81f6-6a76c564fd7c req-19250ba1-3146-48ab-ac37-1b3b48f9b7c3 service nova] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Received unexpected event network-vif-plugged-df8e4607-0bbd-4265-9851-9548c41ae7f8 for instance with vm_state building and task_state spawning. 
[ 673.213844] env[69784]: DEBUG nova.compute.manager [req-9b6db25b-dec8-41a5-81f6-6a76c564fd7c req-19250ba1-3146-48ab-ac37-1b3b48f9b7c3 service nova] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Received event network-vif-plugged-fd23e706-e14f-43a3-836a-1c2be723e052 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 673.213844] env[69784]: DEBUG oslo_concurrency.lockutils [req-9b6db25b-dec8-41a5-81f6-6a76c564fd7c req-19250ba1-3146-48ab-ac37-1b3b48f9b7c3 service nova] Acquiring lock "28e32097-d536-442f-bcb4-f93b64cb64e4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 673.213844] env[69784]: DEBUG oslo_concurrency.lockutils [req-9b6db25b-dec8-41a5-81f6-6a76c564fd7c req-19250ba1-3146-48ab-ac37-1b3b48f9b7c3 service nova] Lock "28e32097-d536-442f-bcb4-f93b64cb64e4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 673.213844] env[69784]: DEBUG oslo_concurrency.lockutils [req-9b6db25b-dec8-41a5-81f6-6a76c564fd7c req-19250ba1-3146-48ab-ac37-1b3b48f9b7c3 service nova] Lock "28e32097-d536-442f-bcb4-f93b64cb64e4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 673.213963] env[69784]: DEBUG nova.compute.manager [req-9b6db25b-dec8-41a5-81f6-6a76c564fd7c req-19250ba1-3146-48ab-ac37-1b3b48f9b7c3 service nova] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] No waiting events found dispatching network-vif-plugged-fd23e706-e14f-43a3-836a-1c2be723e052 {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 673.213963] env[69784]: WARNING nova.compute.manager [req-9b6db25b-dec8-41a5-81f6-6a76c564fd7c req-19250ba1-3146-48ab-ac37-1b3b48f9b7c3 service nova] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Received unexpected event network-vif-plugged-fd23e706-e14f-43a3-836a-1c2be723e052 for instance with vm_state building and task_state spawning. [ 673.213963] env[69784]: DEBUG nova.compute.manager [req-9b6db25b-dec8-41a5-81f6-6a76c564fd7c req-19250ba1-3146-48ab-ac37-1b3b48f9b7c3 service nova] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Received event network-changed-df8e4607-0bbd-4265-9851-9548c41ae7f8 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 673.214074] env[69784]: DEBUG nova.compute.manager [req-9b6db25b-dec8-41a5-81f6-6a76c564fd7c req-19250ba1-3146-48ab-ac37-1b3b48f9b7c3 service nova] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Refreshing instance network info cache due to event network-changed-df8e4607-0bbd-4265-9851-9548c41ae7f8. 
{{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 673.214223] env[69784]: DEBUG oslo_concurrency.lockutils [req-9b6db25b-dec8-41a5-81f6-6a76c564fd7c req-19250ba1-3146-48ab-ac37-1b3b48f9b7c3 service nova] Acquiring lock "refresh_cache-15e3e0f5-1967-4f7d-b45f-954845c3dc75" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 673.214351] env[69784]: DEBUG oslo_concurrency.lockutils [req-9b6db25b-dec8-41a5-81f6-6a76c564fd7c req-19250ba1-3146-48ab-ac37-1b3b48f9b7c3 service nova] Acquired lock "refresh_cache-15e3e0f5-1967-4f7d-b45f-954845c3dc75" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 673.214790] env[69784]: DEBUG nova.network.neutron [req-9b6db25b-dec8-41a5-81f6-6a76c564fd7c req-19250ba1-3146-48ab-ac37-1b3b48f9b7c3 service nova] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Refreshing network info cache for port df8e4607-0bbd-4265-9851-9548c41ae7f8 {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 673.342982] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c972e161-a840-4cd2-86c1-617c289d6e6e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.353880] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e037690-0fa9-42ad-87e6-2a0331629c5c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.389089] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5dadb0e-6d59-45f7-ac2e-6076dd68e399 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.400618] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f4c0464-888d-4242-9a57-0e88945d34ff {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.412531] env[69784]: DEBUG nova.compute.provider_tree [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 673.431138] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 673.450880] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69784) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 673.451127] env[69784]: DEBUG oslo_concurrency.lockutils [None 
req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.461s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 673.452029] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.301s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 673.453382] env[69784]: INFO nova.compute.claims [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 673.456519] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 673.457095] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Getting list of instances from cluster (obj){ [ 673.457095] env[69784]: value = "domain-c8" [ 673.457095] env[69784]: _type = "ClusterComputeResource" [ 673.457095] env[69784]: } {{(pid=69784) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 673.458740] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbd3efdd-f77f-4e96-9063-a74f0ebc88a2 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.480047] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Got total of 7 instances {{(pid=69784) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 673.726320] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e036e96d-291b-4a33-a26e-3c15796b82a6 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.738093] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44579191-4cf2-4469-a588-bff17ed309a1 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.362808] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f4e1f16-675e-4b5c-9958-192b224ca838 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.374774] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92347889-3bea-4181-a346-fa3b6017fac6 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.393278] env[69784]: DEBUG nova.compute.provider_tree [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Inventory has not changed in 
ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 674.411709] env[69784]: DEBUG nova.scheduler.client.report [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 674.425410] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.974s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 674.425936] env[69784]: DEBUG nova.compute.manager [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Start building networks asynchronously for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 674.523683] env[69784]: DEBUG nova.compute.utils [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 674.528132] env[69784]: DEBUG nova.compute.manager [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 674.528524] env[69784]: DEBUG nova.network.neutron [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 674.541868] env[69784]: DEBUG nova.compute.manager [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 674.633291] env[69784]: DEBUG nova.compute.manager [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Start spawning the instance on the hypervisor. 
{{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 674.664499] env[69784]: DEBUG nova.virt.hardware [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 674.664901] env[69784]: DEBUG nova.virt.hardware [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 674.664901] env[69784]: DEBUG nova.virt.hardware [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 674.665093] env[69784]: DEBUG nova.virt.hardware [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 674.665289] env[69784]: DEBUG nova.virt.hardware [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 674.665385] env[69784]: DEBUG nova.virt.hardware [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 674.665681] env[69784]: DEBUG nova.virt.hardware [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 674.665849] env[69784]: DEBUG nova.virt.hardware [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 674.666039] 
env[69784]: DEBUG nova.virt.hardware [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 674.666208] env[69784]: DEBUG nova.virt.hardware [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 674.666377] env[69784]: DEBUG nova.virt.hardware [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 674.667302] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7be4b422-1eba-41c5-9460-6bce02e146b9 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.676218] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3539230-fc3a-4f76-9eae-1aa8b85bc607 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.011603] env[69784]: DEBUG nova.policy [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e5f6eee73d7a43e0b7e0cdd76c849cbb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '031e5a032cf0406098926e4c7cd5b6b6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 675.047138] env[69784]: DEBUG nova.network.neutron [req-9b6db25b-dec8-41a5-81f6-6a76c564fd7c req-19250ba1-3146-48ab-ac37-1b3b48f9b7c3 service nova] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Updated VIF entry in instance network info cache for port df8e4607-0bbd-4265-9851-9548c41ae7f8. 
{{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 675.047957] env[69784]: DEBUG nova.network.neutron [req-9b6db25b-dec8-41a5-81f6-6a76c564fd7c req-19250ba1-3146-48ab-ac37-1b3b48f9b7c3 service nova] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Updating instance_info_cache with network_info: [{"id": "df8e4607-0bbd-4265-9851-9548c41ae7f8", "address": "fa:16:3e:f9:be:d3", "network": {"id": "7e892938-096d-43a3-bc8b-f11cf33ca144", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1877084576-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45a6b45b635c4110820784a8b63df746", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3f695b6-65bc-45cc-a61d-3c38a14e5c0c", "external-id": "nsx-vlan-transportzone-559", "segmentation_id": 559, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf8e4607-0b", "ovs_interfaceid": "df8e4607-0bbd-4265-9851-9548c41ae7f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.063906] env[69784]: DEBUG oslo_concurrency.lockutils [req-9b6db25b-dec8-41a5-81f6-6a76c564fd7c req-19250ba1-3146-48ab-ac37-1b3b48f9b7c3 service nova] Releasing lock "refresh_cache-15e3e0f5-1967-4f7d-b45f-954845c3dc75" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 675.064218] env[69784]: DEBUG nova.compute.manager [req-9b6db25b-dec8-41a5-81f6-6a76c564fd7c req-19250ba1-3146-48ab-ac37-1b3b48f9b7c3 service nova] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Received event network-changed-fd23e706-e14f-43a3-836a-1c2be723e052 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 675.064387] env[69784]: DEBUG nova.compute.manager [req-9b6db25b-dec8-41a5-81f6-6a76c564fd7c req-19250ba1-3146-48ab-ac37-1b3b48f9b7c3 service nova] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Refreshing instance network info cache due to event network-changed-fd23e706-e14f-43a3-836a-1c2be723e052. 
{{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 675.066426] env[69784]: DEBUG oslo_concurrency.lockutils [req-9b6db25b-dec8-41a5-81f6-6a76c564fd7c req-19250ba1-3146-48ab-ac37-1b3b48f9b7c3 service nova] Acquiring lock "refresh_cache-28e32097-d536-442f-bcb4-f93b64cb64e4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 675.066426] env[69784]: DEBUG oslo_concurrency.lockutils [req-9b6db25b-dec8-41a5-81f6-6a76c564fd7c req-19250ba1-3146-48ab-ac37-1b3b48f9b7c3 service nova] Acquired lock "refresh_cache-28e32097-d536-442f-bcb4-f93b64cb64e4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 675.066426] env[69784]: DEBUG nova.network.neutron [req-9b6db25b-dec8-41a5-81f6-6a76c564fd7c req-19250ba1-3146-48ab-ac37-1b3b48f9b7c3 service nova] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Refreshing network info cache for port fd23e706-e14f-43a3-836a-1c2be723e052 {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 676.860274] env[69784]: DEBUG nova.network.neutron [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Successfully created port: a7b68e59-1c65-4cd6-b005-dd73ea0d25fa {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 676.897420] env[69784]: DEBUG nova.network.neutron [req-9b6db25b-dec8-41a5-81f6-6a76c564fd7c req-19250ba1-3146-48ab-ac37-1b3b48f9b7c3 service nova] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Updated VIF entry in instance network info cache for port fd23e706-e14f-43a3-836a-1c2be723e052. 
{{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 676.899568] env[69784]: DEBUG nova.network.neutron [req-9b6db25b-dec8-41a5-81f6-6a76c564fd7c req-19250ba1-3146-48ab-ac37-1b3b48f9b7c3 service nova] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Updating instance_info_cache with network_info: [{"id": "fd23e706-e14f-43a3-836a-1c2be723e052", "address": "fa:16:3e:db:4b:70", "network": {"id": "089b8552-21a7-446a-93e8-08dfd5616726", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-692750947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0126dda00a44838ac749dee6f266970", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "75ff81f9-72b2-4e58-a8d8-5699907f7459", "external-id": "nsx-vlan-transportzone-978", "segmentation_id": 978, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd23e706-e1", "ovs_interfaceid": "fd23e706-e14f-43a3-836a-1c2be723e052", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 676.912210] env[69784]: DEBUG oslo_concurrency.lockutils [req-9b6db25b-dec8-41a5-81f6-6a76c564fd7c req-19250ba1-3146-48ab-ac37-1b3b48f9b7c3 service nova] Releasing lock "refresh_cache-28e32097-d536-442f-bcb4-f93b64cb64e4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 677.064756] env[69784]: DEBUG nova.compute.manager [req-4074b677-00e9-49d9-9699-a7187a50d654 req-47952841-f304-4336-b616-0ec9f8a2a8c7 service nova] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Received event network-changed-1ff7f733-8031-49d3-9187-1cd25caea187 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 677.064959] env[69784]: DEBUG nova.compute.manager [req-4074b677-00e9-49d9-9699-a7187a50d654 req-47952841-f304-4336-b616-0ec9f8a2a8c7 service nova] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Refreshing instance network info cache due to event network-changed-1ff7f733-8031-49d3-9187-1cd25caea187. 
{{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 677.066437] env[69784]: DEBUG oslo_concurrency.lockutils [req-4074b677-00e9-49d9-9699-a7187a50d654 req-47952841-f304-4336-b616-0ec9f8a2a8c7 service nova] Acquiring lock "refresh_cache-a927e0ff-6c39-47fd-a082-88d41eb54015" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 677.066437] env[69784]: DEBUG oslo_concurrency.lockutils [req-4074b677-00e9-49d9-9699-a7187a50d654 req-47952841-f304-4336-b616-0ec9f8a2a8c7 service nova] Acquired lock "refresh_cache-a927e0ff-6c39-47fd-a082-88d41eb54015" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 677.066437] env[69784]: DEBUG nova.network.neutron [req-4074b677-00e9-49d9-9699-a7187a50d654 req-47952841-f304-4336-b616-0ec9f8a2a8c7 service nova] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Refreshing network info cache for port 1ff7f733-8031-49d3-9187-1cd25caea187 {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 678.574493] env[69784]: DEBUG nova.network.neutron [req-4074b677-00e9-49d9-9699-a7187a50d654 req-47952841-f304-4336-b616-0ec9f8a2a8c7 service nova] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Updated VIF entry in instance network info cache for port 1ff7f733-8031-49d3-9187-1cd25caea187. {{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 678.574493] env[69784]: DEBUG nova.network.neutron [req-4074b677-00e9-49d9-9699-a7187a50d654 req-47952841-f304-4336-b616-0ec9f8a2a8c7 service nova] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Updating instance_info_cache with network_info: [{"id": "1ff7f733-8031-49d3-9187-1cd25caea187", "address": "fa:16:3e:e4:d7:0e", "network": {"id": "e0a27684-7bce-47e5-aa25-b3e5b50d3019", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.106", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7b0f6ea1d8724018ae13e62fe7220317", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ff7f733-80", "ovs_interfaceid": "1ff7f733-8031-49d3-9187-1cd25caea187", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 678.592945] env[69784]: DEBUG oslo_concurrency.lockutils [req-4074b677-00e9-49d9-9699-a7187a50d654 req-47952841-f304-4336-b616-0ec9f8a2a8c7 service nova] Releasing lock "refresh_cache-a927e0ff-6c39-47fd-a082-88d41eb54015" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 680.774534] env[69784]: DEBUG nova.network.neutron [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] 
Successfully updated port: a7b68e59-1c65-4cd6-b005-dd73ea0d25fa {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 680.787463] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Acquiring lock "refresh_cache-c16f7920-23d4-4d77-b70f-118887cc9ff7" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 680.787964] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Acquired lock "refresh_cache-c16f7920-23d4-4d77-b70f-118887cc9ff7" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 680.788165] env[69784]: DEBUG nova.network.neutron [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 680.966988] env[69784]: DEBUG nova.network.neutron [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Instance cache missing network info. {{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 682.064476] env[69784]: DEBUG nova.network.neutron [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Updating instance_info_cache with network_info: [{"id": "a7b68e59-1c65-4cd6-b005-dd73ea0d25fa", "address": "fa:16:3e:78:cd:b7", "network": {"id": "e0a27684-7bce-47e5-aa25-b3e5b50d3019", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7b0f6ea1d8724018ae13e62fe7220317", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7b68e59-1c", "ovs_interfaceid": "a7b68e59-1c65-4cd6-b005-dd73ea0d25fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.082781] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Releasing lock "refresh_cache-c16f7920-23d4-4d77-b70f-118887cc9ff7" {{(pid=69784) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 682.084229] env[69784]: DEBUG nova.compute.manager [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Instance network_info: |[{"id": "a7b68e59-1c65-4cd6-b005-dd73ea0d25fa", "address": "fa:16:3e:78:cd:b7", "network": {"id": "e0a27684-7bce-47e5-aa25-b3e5b50d3019", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7b0f6ea1d8724018ae13e62fe7220317", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7b68e59-1c", "ovs_interfaceid": "a7b68e59-1c65-4cd6-b005-dd73ea0d25fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 682.084959] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:78:cd:b7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '685b4083-b748-41fb-a68a-273b1073fa28', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a7b68e59-1c65-4cd6-b005-dd73ea0d25fa', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 682.101100] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Creating folder: Project (031e5a032cf0406098926e4c7cd5b6b6). Parent ref: group-v692547. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 682.101100] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9502706f-ac78-4e58-ae96-8e5e4c09431e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.112400] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Created folder: Project (031e5a032cf0406098926e4c7cd5b6b6) in parent group-v692547. [ 682.113449] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Creating folder: Instances. Parent ref: group-v692569. 
{{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 682.113449] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d1ea0cc4-d10a-421c-8299-943e5a663155 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.121281] env[69784]: DEBUG nova.compute.manager [req-12c692ec-96ba-4f05-89c1-e5278defcfe6 req-216da7b8-9452-4a20-9ed7-968f45a3cdef service nova] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Received event network-vif-plugged-a7b68e59-1c65-4cd6-b005-dd73ea0d25fa {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 682.121419] env[69784]: DEBUG oslo_concurrency.lockutils [req-12c692ec-96ba-4f05-89c1-e5278defcfe6 req-216da7b8-9452-4a20-9ed7-968f45a3cdef service nova] Acquiring lock "c16f7920-23d4-4d77-b70f-118887cc9ff7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 682.121645] env[69784]: DEBUG oslo_concurrency.lockutils [req-12c692ec-96ba-4f05-89c1-e5278defcfe6 req-216da7b8-9452-4a20-9ed7-968f45a3cdef service nova] Lock "c16f7920-23d4-4d77-b70f-118887cc9ff7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 682.121876] env[69784]: DEBUG oslo_concurrency.lockutils [req-12c692ec-96ba-4f05-89c1-e5278defcfe6 req-216da7b8-9452-4a20-9ed7-968f45a3cdef service nova] Lock "c16f7920-23d4-4d77-b70f-118887cc9ff7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 682.121979] env[69784]: DEBUG nova.compute.manager [req-12c692ec-96ba-4f05-89c1-e5278defcfe6 req-216da7b8-9452-4a20-9ed7-968f45a3cdef service nova] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] No waiting events found dispatching network-vif-plugged-a7b68e59-1c65-4cd6-b005-dd73ea0d25fa {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 682.122154] env[69784]: WARNING nova.compute.manager [req-12c692ec-96ba-4f05-89c1-e5278defcfe6 req-216da7b8-9452-4a20-9ed7-968f45a3cdef service nova] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Received unexpected event network-vif-plugged-a7b68e59-1c65-4cd6-b005-dd73ea0d25fa for instance with vm_state building and task_state spawning. [ 682.136051] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Created folder: Instances in parent group-v692569. [ 682.136051] env[69784]: DEBUG oslo.service.loopingcall [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 682.136277] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 682.136492] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f4aef4d9-fedf-4d18-8fa8-0d5b77c2d651 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.165013] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 682.165013] env[69784]: value = "task-3467025" [ 682.165013] env[69784]: _type = "Task" [ 682.165013] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.174732] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467025, 'name': CreateVM_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.678661] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467025, 'name': CreateVM_Task, 'duration_secs': 0.293931} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.679298] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 682.679827] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 682.679996] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 682.680397] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 682.680662] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a138b913-7c97-4ede-8c44-a82ab784b52b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.686975] env[69784]: DEBUG oslo_vmware.api [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Waiting for the task: (returnval){ [ 682.686975] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52521777-de06-1bb4-5a64-fa9f4e6cd2bf" [ 682.686975] env[69784]: _type = "Task" [ 682.686975] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.699750] env[69784]: DEBUG oslo_vmware.api [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52521777-de06-1bb4-5a64-fa9f4e6cd2bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.200137] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 683.200458] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 683.201318] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 683.568483] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquiring lock "c43ca674-06b8-4b5d-a709-2df095b509f3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 683.568975] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Lock "c43ca674-06b8-4b5d-a709-2df095b509f3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 683.585418] env[69784]: DEBUG nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Starting instance... 
{{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 683.602543] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquiring lock "0d0d1503-5522-4c0d-9096-2f25ed0fd7df" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 683.602842] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Lock "0d0d1503-5522-4c0d-9096-2f25ed0fd7df" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 683.615010] env[69784]: DEBUG nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 683.684819] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquiring lock "65601835-8d30-46b8-b928-b3912d058c6e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 683.686143] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Lock "65601835-8d30-46b8-b928-b3912d058c6e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 683.692766] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 683.693000] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 683.694923] env[69784]: INFO nova.compute.claims [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 683.699360] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 683.975205] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7166ebe-c622-4e34-a538-cfc2920347f5 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.984024] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d08113ab-a8ed-4973-aa32-b3555864fde7 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.018339] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a45843c6-71f7-4efa-8e30-e6d7337df7be {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.027512] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72db5bd4-485c-4a6e-bee6-e74ee83ed126 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.044225] env[69784]: DEBUG nova.compute.provider_tree [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 684.053776] env[69784]: DEBUG nova.scheduler.client.report [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 684.073447] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.377s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 684.073447] env[69784]: DEBUG nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Start building networks asynchronously for instance. 
{{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 684.080022] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.374s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 684.080022] env[69784]: INFO nova.compute.claims [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 684.125253] env[69784]: DEBUG nova.compute.utils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 684.126883] env[69784]: DEBUG nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 684.126883] env[69784]: DEBUG nova.network.neutron [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 684.141229] env[69784]: DEBUG nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 684.281789] env[69784]: DEBUG nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Start spawning the instance on the hypervisor. 
{{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 684.329565] env[69784]: DEBUG nova.virt.hardware [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 684.329939] env[69784]: DEBUG nova.virt.hardware [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 684.330094] env[69784]: DEBUG nova.virt.hardware [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 684.330663] env[69784]: DEBUG nova.virt.hardware [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 684.330831] env[69784]: DEBUG nova.virt.hardware [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 684.330983] env[69784]: DEBUG nova.virt.hardware [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 684.331640] env[69784]: DEBUG nova.virt.hardware [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 684.331640] env[69784]: DEBUG nova.virt.hardware [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 684.331798] env[69784]: DEBUG nova.virt.hardware [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 684.332073] env[69784]: DEBUG nova.virt.hardware [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 684.332382] env[69784]: DEBUG nova.virt.hardware [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 684.333564] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d0d403-4659-46bc-a72c-abac85ba0b33 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.354321] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f433cc6-78a6-4cfd-9eef-54066159b986 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.371106] env[69784]: DEBUG oslo_concurrency.lockutils [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Acquiring lock "9369b20b-7027-47de-8495-a503ddfb69bd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 684.372647] env[69784]: DEBUG oslo_concurrency.lockutils [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Lock "9369b20b-7027-47de-8495-a503ddfb69bd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 684.472134] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5663d81-b8f8-40da-aa28-50ce8ae355d1 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.483717] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da4eb237-1163-435a-bc63-a252835eccc7 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.517068] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-463831fa-93f9-495d-961c-df43c56abecc {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.526055] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27e6de50-ecc5-42f0-99d5-8e75583b515c 
{{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.541934] env[69784]: DEBUG nova.compute.provider_tree [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 684.563296] env[69784]: DEBUG nova.scheduler.client.report [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 684.587892] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.515s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 684.588451] env[69784]: DEBUG nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Start building networks asynchronously for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 684.639393] env[69784]: DEBUG nova.compute.utils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 684.643114] env[69784]: DEBUG nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 684.643114] env[69784]: DEBUG nova.network.neutron [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 684.663786] env[69784]: DEBUG nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Start building block device mappings for instance. 
{{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 684.683044] env[69784]: DEBUG nova.policy [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '83eed4c4733d4a15803de5acdbc3a679', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a661683342d34a0eb49a0fa3e979028b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 684.756053] env[69784]: DEBUG nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Start spawning the instance on the hypervisor. {{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 684.800353] env[69784]: DEBUG nova.virt.hardware [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 684.800753] env[69784]: DEBUG nova.virt.hardware [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 684.800992] env[69784]: DEBUG nova.virt.hardware [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 684.801279] env[69784]: DEBUG nova.virt.hardware [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 684.801495] env[69784]: DEBUG nova.virt.hardware [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 684.801707] env[69784]: DEBUG nova.virt.hardware [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 684.802022] env[69784]: DEBUG nova.virt.hardware [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 684.802260] env[69784]: DEBUG nova.virt.hardware [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 684.802531] env[69784]: DEBUG nova.virt.hardware [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 684.803044] env[69784]: DEBUG nova.virt.hardware [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 684.803351] env[69784]: DEBUG nova.virt.hardware [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 684.804367] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b47567ce-2e4e-4ef0-b4ac-9630ae39f772 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.814121] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f24291f-e17d-4c74-bf00-45008877b4fa {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.132857] env[69784]: DEBUG nova.policy [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '83eed4c4733d4a15803de5acdbc3a679', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a661683342d34a0eb49a0fa3e979028b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 686.494011] env[69784]: DEBUG 
nova.network.neutron [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Successfully created port: 4a0ab72b-29eb-42d0-b506-56fd405371c3 {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 686.809259] env[69784]: DEBUG nova.compute.manager [req-1502b6b6-ab01-4ca5-9e68-ee70ec0ab287 req-98bd07e1-0a88-4c72-bbe8-0b444947a146 service nova] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Received event network-changed-a7b68e59-1c65-4cd6-b005-dd73ea0d25fa {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 686.809984] env[69784]: DEBUG nova.compute.manager [req-1502b6b6-ab01-4ca5-9e68-ee70ec0ab287 req-98bd07e1-0a88-4c72-bbe8-0b444947a146 service nova] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Refreshing instance network info cache due to event network-changed-a7b68e59-1c65-4cd6-b005-dd73ea0d25fa. {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 686.810544] env[69784]: DEBUG oslo_concurrency.lockutils [req-1502b6b6-ab01-4ca5-9e68-ee70ec0ab287 req-98bd07e1-0a88-4c72-bbe8-0b444947a146 service nova] Acquiring lock "refresh_cache-c16f7920-23d4-4d77-b70f-118887cc9ff7" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 686.810920] env[69784]: DEBUG oslo_concurrency.lockutils [req-1502b6b6-ab01-4ca5-9e68-ee70ec0ab287 req-98bd07e1-0a88-4c72-bbe8-0b444947a146 service nova] Acquired lock "refresh_cache-c16f7920-23d4-4d77-b70f-118887cc9ff7" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 686.811026] env[69784]: DEBUG nova.network.neutron [req-1502b6b6-ab01-4ca5-9e68-ee70ec0ab287 req-98bd07e1-0a88-4c72-bbe8-0b444947a146 service nova] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Refreshing network info cache for port a7b68e59-1c65-4cd6-b005-dd73ea0d25fa {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 687.310032] env[69784]: DEBUG nova.network.neutron [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Successfully created port: cbfa1fad-dba0-4864-8591-a08cb670452e {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 688.252902] env[69784]: DEBUG nova.network.neutron [req-1502b6b6-ab01-4ca5-9e68-ee70ec0ab287 req-98bd07e1-0a88-4c72-bbe8-0b444947a146 service nova] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Updated VIF entry in instance network info cache for port a7b68e59-1c65-4cd6-b005-dd73ea0d25fa. 
{{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 688.253286] env[69784]: DEBUG nova.network.neutron [req-1502b6b6-ab01-4ca5-9e68-ee70ec0ab287 req-98bd07e1-0a88-4c72-bbe8-0b444947a146 service nova] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Updating instance_info_cache with network_info: [{"id": "a7b68e59-1c65-4cd6-b005-dd73ea0d25fa", "address": "fa:16:3e:78:cd:b7", "network": {"id": "e0a27684-7bce-47e5-aa25-b3e5b50d3019", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7b0f6ea1d8724018ae13e62fe7220317", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7b68e59-1c", "ovs_interfaceid": "a7b68e59-1c65-4cd6-b005-dd73ea0d25fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.266828] env[69784]: DEBUG oslo_concurrency.lockutils [req-1502b6b6-ab01-4ca5-9e68-ee70ec0ab287 req-98bd07e1-0a88-4c72-bbe8-0b444947a146 service nova] Releasing lock "refresh_cache-c16f7920-23d4-4d77-b70f-118887cc9ff7" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 688.685099] env[69784]: DEBUG nova.network.neutron [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Successfully updated port: 4a0ab72b-29eb-42d0-b506-56fd405371c3 {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 688.701454] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquiring lock "refresh_cache-c43ca674-06b8-4b5d-a709-2df095b509f3" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 688.702365] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquired lock "refresh_cache-c43ca674-06b8-4b5d-a709-2df095b509f3" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 688.702365] env[69784]: DEBUG nova.network.neutron [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 688.840279] env[69784]: DEBUG nova.network.neutron [None req-46ce3f76-8385-4320-af51-d11731c3933a 
tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Instance cache missing network info. {{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 689.582309] env[69784]: DEBUG nova.network.neutron [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Successfully updated port: cbfa1fad-dba0-4864-8591-a08cb670452e {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 689.593071] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquiring lock "refresh_cache-0d0d1503-5522-4c0d-9096-2f25ed0fd7df" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 689.595176] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquired lock "refresh_cache-0d0d1503-5522-4c0d-9096-2f25ed0fd7df" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 689.596634] env[69784]: DEBUG nova.network.neutron [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 689.621009] env[69784]: DEBUG nova.network.neutron [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Updating instance_info_cache with network_info: [{"id": "4a0ab72b-29eb-42d0-b506-56fd405371c3", "address": "fa:16:3e:0e:7d:d2", "network": {"id": "f48f61be-e364-4493-9d88-88e91dc124a7", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1610168610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a661683342d34a0eb49a0fa3e979028b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba27300-88df-4c95-b9e0-a4a8b5039c3c", "external-id": "nsx-vlan-transportzone-681", "segmentation_id": 681, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a0ab72b-29", "ovs_interfaceid": "4a0ab72b-29eb-42d0-b506-56fd405371c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 689.640628] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a 
tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Releasing lock "refresh_cache-c43ca674-06b8-4b5d-a709-2df095b509f3" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 689.640628] env[69784]: DEBUG nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Instance network_info: |[{"id": "4a0ab72b-29eb-42d0-b506-56fd405371c3", "address": "fa:16:3e:0e:7d:d2", "network": {"id": "f48f61be-e364-4493-9d88-88e91dc124a7", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1610168610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a661683342d34a0eb49a0fa3e979028b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba27300-88df-4c95-b9e0-a4a8b5039c3c", "external-id": "nsx-vlan-transportzone-681", "segmentation_id": 681, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a0ab72b-29", "ovs_interfaceid": "4a0ab72b-29eb-42d0-b506-56fd405371c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 689.641095] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:7d:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5ba27300-88df-4c95-b9e0-a4a8b5039c3c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4a0ab72b-29eb-42d0-b506-56fd405371c3', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 689.654448] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Creating folder: Project (a661683342d34a0eb49a0fa3e979028b). Parent ref: group-v692547. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 689.655638] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-413413ec-55b5-4c19-b42f-b753cda7a52c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.660458] env[69784]: DEBUG nova.network.neutron [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Instance cache missing network info. 
{{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 689.668229] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Created folder: Project (a661683342d34a0eb49a0fa3e979028b) in parent group-v692547. [ 689.668229] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Creating folder: Instances. Parent ref: group-v692572. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 689.668229] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2ce5e48e-b4af-4d49-a712-eae81fd70b0f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.684898] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Created folder: Instances in parent group-v692572. [ 689.685108] env[69784]: DEBUG oslo.service.loopingcall [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 689.685325] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 689.685839] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-06d4bb97-4fb4-4dbf-81b1-03f8abe36402 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.715247] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 689.715247] env[69784]: value = "task-3467028" [ 689.715247] env[69784]: _type = "Task" [ 689.715247] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.727260] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467028, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.057835] env[69784]: DEBUG nova.network.neutron [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Updating instance_info_cache with network_info: [{"id": "cbfa1fad-dba0-4864-8591-a08cb670452e", "address": "fa:16:3e:29:59:50", "network": {"id": "f48f61be-e364-4493-9d88-88e91dc124a7", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1610168610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a661683342d34a0eb49a0fa3e979028b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba27300-88df-4c95-b9e0-a4a8b5039c3c", "external-id": "nsx-vlan-transportzone-681", "segmentation_id": 681, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbfa1fad-db", "ovs_interfaceid": "cbfa1fad-dba0-4864-8591-a08cb670452e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 690.077389] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Releasing lock "refresh_cache-0d0d1503-5522-4c0d-9096-2f25ed0fd7df" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 690.077882] env[69784]: DEBUG nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Instance network_info: |[{"id": "cbfa1fad-dba0-4864-8591-a08cb670452e", "address": "fa:16:3e:29:59:50", "network": {"id": "f48f61be-e364-4493-9d88-88e91dc124a7", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1610168610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a661683342d34a0eb49a0fa3e979028b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba27300-88df-4c95-b9e0-a4a8b5039c3c", "external-id": "nsx-vlan-transportzone-681", "segmentation_id": 681, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbfa1fad-db", "ovs_interfaceid": "cbfa1fad-dba0-4864-8591-a08cb670452e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 690.078868] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:29:59:50', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5ba27300-88df-4c95-b9e0-a4a8b5039c3c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cbfa1fad-dba0-4864-8591-a08cb670452e', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 690.090668] env[69784]: DEBUG oslo.service.loopingcall [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 690.091333] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 690.091581] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d90e29da-5584-4a64-af94-27fba2e2e0af {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.117563] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 690.117563] env[69784]: value = "task-3467029" [ 690.117563] env[69784]: _type = "Task" [ 690.117563] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.127204] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467029, 'name': CreateVM_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.227664] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467028, 'name': CreateVM_Task, 'duration_secs': 0.49854} completed successfully. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.227664] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 690.227900] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 690.228158] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 690.228721] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 690.229026] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02a3439d-1075-43ec-9cc4-992008718abf {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.235956] env[69784]: DEBUG oslo_vmware.api [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Waiting for the task: (returnval){ [ 690.235956] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52e7bd33-25b6-1286-9196-6763d828680f" [ 690.235956] env[69784]: _type = "Task" [ 690.235956] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.245235] env[69784]: DEBUG oslo_vmware.api [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52e7bd33-25b6-1286-9196-6763d828680f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.629402] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467029, 'name': CreateVM_Task, 'duration_secs': 0.3278} completed successfully. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.629757] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 690.631446] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 690.754966] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 690.754966] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 690.754966] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 690.754966] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 690.755234] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 690.755234] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b822b19-a5f9-4433-a285-0e694711e22d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.760024] env[69784]: DEBUG oslo_vmware.api [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Waiting for the task: (returnval){ [ 690.760024] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52b1ba52-d8e1-3957-2fe7-963f38859d46" [ 690.760024] env[69784]: _type = "Task" [ 690.760024] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.770015] env[69784]: DEBUG oslo_vmware.api [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52b1ba52-d8e1-3957-2fe7-963f38859d46, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.084343] env[69784]: DEBUG oslo_concurrency.lockutils [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Acquiring lock "ecec531e-41d9-47e3-b447-bc658edaea69" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 691.084579] env[69784]: DEBUG oslo_concurrency.lockutils [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Lock "ecec531e-41d9-47e3-b447-bc658edaea69" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 691.192769] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Acquiring lock "c68ee659-716d-47cc-a6a1-d4c18fa5664f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 691.192769] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Lock "c68ee659-716d-47cc-a6a1-d4c18fa5664f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 691.275184] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 691.275462] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 691.275671] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquiring lock "[datastore1] 
devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 691.905569] env[69784]: DEBUG oslo_concurrency.lockutils [None req-fbdb0b93-6220-4a8b-ab04-31954bb66143 tempest-VolumesAssistedSnapshotsTest-2036838310 tempest-VolumesAssistedSnapshotsTest-2036838310-project-member] Acquiring lock "f27a066f-307a-4213-b4d3-a861068f4867" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 691.905889] env[69784]: DEBUG oslo_concurrency.lockutils [None req-fbdb0b93-6220-4a8b-ab04-31954bb66143 tempest-VolumesAssistedSnapshotsTest-2036838310 tempest-VolumesAssistedSnapshotsTest-2036838310-project-member] Lock "f27a066f-307a-4213-b4d3-a861068f4867" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 692.493123] env[69784]: DEBUG nova.compute.manager [req-f61071f9-4355-4001-ab02-c4cb426e1088 req-aa391206-99d9-40cc-a6fc-34d6a909497e service nova] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Received event network-vif-plugged-4a0ab72b-29eb-42d0-b506-56fd405371c3 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 692.493287] env[69784]: DEBUG oslo_concurrency.lockutils [req-f61071f9-4355-4001-ab02-c4cb426e1088 req-aa391206-99d9-40cc-a6fc-34d6a909497e service nova] Acquiring lock "c43ca674-06b8-4b5d-a709-2df095b509f3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 692.493502] env[69784]: DEBUG oslo_concurrency.lockutils [req-f61071f9-4355-4001-ab02-c4cb426e1088 req-aa391206-99d9-40cc-a6fc-34d6a909497e service nova] Lock "c43ca674-06b8-4b5d-a709-2df095b509f3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 692.493665] env[69784]: DEBUG oslo_concurrency.lockutils [req-f61071f9-4355-4001-ab02-c4cb426e1088 req-aa391206-99d9-40cc-a6fc-34d6a909497e service nova] Lock "c43ca674-06b8-4b5d-a709-2df095b509f3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 692.494357] env[69784]: DEBUG nova.compute.manager [req-f61071f9-4355-4001-ab02-c4cb426e1088 req-aa391206-99d9-40cc-a6fc-34d6a909497e service nova] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] No waiting events found dispatching network-vif-plugged-4a0ab72b-29eb-42d0-b506-56fd405371c3 {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 692.494357] env[69784]: WARNING nova.compute.manager [req-f61071f9-4355-4001-ab02-c4cb426e1088 req-aa391206-99d9-40cc-a6fc-34d6a909497e service nova] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Received unexpected event network-vif-plugged-4a0ab72b-29eb-42d0-b506-56fd405371c3 for instance with vm_state building and task_state spawning. 
[ 692.495461] env[69784]: DEBUG nova.compute.manager [req-f61071f9-4355-4001-ab02-c4cb426e1088 req-aa391206-99d9-40cc-a6fc-34d6a909497e service nova] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Received event network-changed-4a0ab72b-29eb-42d0-b506-56fd405371c3 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 692.495645] env[69784]: DEBUG nova.compute.manager [req-f61071f9-4355-4001-ab02-c4cb426e1088 req-aa391206-99d9-40cc-a6fc-34d6a909497e service nova] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Refreshing instance network info cache due to event network-changed-4a0ab72b-29eb-42d0-b506-56fd405371c3. {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 692.496010] env[69784]: DEBUG oslo_concurrency.lockutils [req-f61071f9-4355-4001-ab02-c4cb426e1088 req-aa391206-99d9-40cc-a6fc-34d6a909497e service nova] Acquiring lock "refresh_cache-c43ca674-06b8-4b5d-a709-2df095b509f3" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 692.496260] env[69784]: DEBUG oslo_concurrency.lockutils [req-f61071f9-4355-4001-ab02-c4cb426e1088 req-aa391206-99d9-40cc-a6fc-34d6a909497e service nova] Acquired lock "refresh_cache-c43ca674-06b8-4b5d-a709-2df095b509f3" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 692.496438] env[69784]: DEBUG nova.network.neutron [req-f61071f9-4355-4001-ab02-c4cb426e1088 req-aa391206-99d9-40cc-a6fc-34d6a909497e service nova] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Refreshing network info cache for port 4a0ab72b-29eb-42d0-b506-56fd405371c3 {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 692.816760] env[69784]: DEBUG nova.compute.manager [req-8dee4ddd-0e56-4777-95e6-1ced4b87b165 req-7f0e74f6-5914-44af-bbc0-926abb1271a5 service nova] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Received event network-vif-plugged-cbfa1fad-dba0-4864-8591-a08cb670452e {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 692.817327] env[69784]: DEBUG oslo_concurrency.lockutils [req-8dee4ddd-0e56-4777-95e6-1ced4b87b165 req-7f0e74f6-5914-44af-bbc0-926abb1271a5 service nova] Acquiring lock "0d0d1503-5522-4c0d-9096-2f25ed0fd7df-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 692.817748] env[69784]: DEBUG oslo_concurrency.lockutils [req-8dee4ddd-0e56-4777-95e6-1ced4b87b165 req-7f0e74f6-5914-44af-bbc0-926abb1271a5 service nova] Lock "0d0d1503-5522-4c0d-9096-2f25ed0fd7df-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 692.819217] env[69784]: DEBUG oslo_concurrency.lockutils [req-8dee4ddd-0e56-4777-95e6-1ced4b87b165 req-7f0e74f6-5914-44af-bbc0-926abb1271a5 service nova] Lock "0d0d1503-5522-4c0d-9096-2f25ed0fd7df-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 692.819217] env[69784]: DEBUG nova.compute.manager [req-8dee4ddd-0e56-4777-95e6-1ced4b87b165 req-7f0e74f6-5914-44af-bbc0-926abb1271a5 service nova] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] No waiting events 
found dispatching network-vif-plugged-cbfa1fad-dba0-4864-8591-a08cb670452e {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 692.819217] env[69784]: WARNING nova.compute.manager [req-8dee4ddd-0e56-4777-95e6-1ced4b87b165 req-7f0e74f6-5914-44af-bbc0-926abb1271a5 service nova] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Received unexpected event network-vif-plugged-cbfa1fad-dba0-4864-8591-a08cb670452e for instance with vm_state building and task_state spawning. [ 693.137736] env[69784]: DEBUG nova.network.neutron [req-f61071f9-4355-4001-ab02-c4cb426e1088 req-aa391206-99d9-40cc-a6fc-34d6a909497e service nova] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Updated VIF entry in instance network info cache for port 4a0ab72b-29eb-42d0-b506-56fd405371c3. {{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 693.138437] env[69784]: DEBUG nova.network.neutron [req-f61071f9-4355-4001-ab02-c4cb426e1088 req-aa391206-99d9-40cc-a6fc-34d6a909497e service nova] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Updating instance_info_cache with network_info: [{"id": "4a0ab72b-29eb-42d0-b506-56fd405371c3", "address": "fa:16:3e:0e:7d:d2", "network": {"id": "f48f61be-e364-4493-9d88-88e91dc124a7", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1610168610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a661683342d34a0eb49a0fa3e979028b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba27300-88df-4c95-b9e0-a4a8b5039c3c", "external-id": "nsx-vlan-transportzone-681", "segmentation_id": 681, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a0ab72b-29", "ovs_interfaceid": "4a0ab72b-29eb-42d0-b506-56fd405371c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.155871] env[69784]: DEBUG oslo_concurrency.lockutils [req-f61071f9-4355-4001-ab02-c4cb426e1088 req-aa391206-99d9-40cc-a6fc-34d6a909497e service nova] Releasing lock "refresh_cache-c43ca674-06b8-4b5d-a709-2df095b509f3" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 693.177678] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7e7b763a-7d4a-4835-bb8b-60603a7c722f tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Acquiring lock "9b67f788-cf36-4bb3-bdc0-575d2a2178ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 693.177678] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7e7b763a-7d4a-4835-bb8b-60603a7c722f tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Lock "9b67f788-cf36-4bb3-bdc0-575d2a2178ca" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 695.293944] env[69784]: DEBUG oslo_concurrency.lockutils [None req-6db1a888-bb9e-42b7-9c11-279531c3ecb6 tempest-InstanceActionsTestJSON-95801263 tempest-InstanceActionsTestJSON-95801263-project-member] Acquiring lock "2a8374bd-e901-4b1e-b9ee-e599bd8efed5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 695.294231] env[69784]: DEBUG oslo_concurrency.lockutils [None req-6db1a888-bb9e-42b7-9c11-279531c3ecb6 tempest-InstanceActionsTestJSON-95801263 tempest-InstanceActionsTestJSON-95801263-project-member] Lock "2a8374bd-e901-4b1e-b9ee-e599bd8efed5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 695.982397] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4250ddc5-448a-4e5e-ae90-c16604170e45 tempest-ServersV294TestFqdnHostnames-388572063 tempest-ServersV294TestFqdnHostnames-388572063-project-member] Acquiring lock "14daabdf-7839-4dfb-bbc9-f4ea90e8db8e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 695.982825] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4250ddc5-448a-4e5e-ae90-c16604170e45 tempest-ServersV294TestFqdnHostnames-388572063 tempest-ServersV294TestFqdnHostnames-388572063-project-member] Lock "14daabdf-7839-4dfb-bbc9-f4ea90e8db8e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 696.714817] env[69784]: DEBUG nova.compute.manager [req-fbef4ed7-8dd8-4cdd-95e8-42810abde805 req-6ba1c6bb-5790-4dcf-8a90-276b3b0a1509 service nova] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Received event network-changed-cbfa1fad-dba0-4864-8591-a08cb670452e {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 696.715077] env[69784]: DEBUG nova.compute.manager [req-fbef4ed7-8dd8-4cdd-95e8-42810abde805 req-6ba1c6bb-5790-4dcf-8a90-276b3b0a1509 service nova] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Refreshing instance network info cache due to event network-changed-cbfa1fad-dba0-4864-8591-a08cb670452e. 
{{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 696.715229] env[69784]: DEBUG oslo_concurrency.lockutils [req-fbef4ed7-8dd8-4cdd-95e8-42810abde805 req-6ba1c6bb-5790-4dcf-8a90-276b3b0a1509 service nova] Acquiring lock "refresh_cache-0d0d1503-5522-4c0d-9096-2f25ed0fd7df" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 696.715402] env[69784]: DEBUG oslo_concurrency.lockutils [req-fbef4ed7-8dd8-4cdd-95e8-42810abde805 req-6ba1c6bb-5790-4dcf-8a90-276b3b0a1509 service nova] Acquired lock "refresh_cache-0d0d1503-5522-4c0d-9096-2f25ed0fd7df" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 696.715530] env[69784]: DEBUG nova.network.neutron [req-fbef4ed7-8dd8-4cdd-95e8-42810abde805 req-6ba1c6bb-5790-4dcf-8a90-276b3b0a1509 service nova] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Refreshing network info cache for port cbfa1fad-dba0-4864-8591-a08cb670452e {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 697.480165] env[69784]: DEBUG nova.network.neutron [req-fbef4ed7-8dd8-4cdd-95e8-42810abde805 req-6ba1c6bb-5790-4dcf-8a90-276b3b0a1509 service nova] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Updated VIF entry in instance network info cache for port cbfa1fad-dba0-4864-8591-a08cb670452e. {{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 697.480545] env[69784]: DEBUG nova.network.neutron [req-fbef4ed7-8dd8-4cdd-95e8-42810abde805 req-6ba1c6bb-5790-4dcf-8a90-276b3b0a1509 service nova] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Updating instance_info_cache with network_info: [{"id": "cbfa1fad-dba0-4864-8591-a08cb670452e", "address": "fa:16:3e:29:59:50", "network": {"id": "f48f61be-e364-4493-9d88-88e91dc124a7", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1610168610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a661683342d34a0eb49a0fa3e979028b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba27300-88df-4c95-b9e0-a4a8b5039c3c", "external-id": "nsx-vlan-transportzone-681", "segmentation_id": 681, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbfa1fad-db", "ovs_interfaceid": "cbfa1fad-dba0-4864-8591-a08cb670452e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 697.506778] env[69784]: DEBUG oslo_concurrency.lockutils [req-fbef4ed7-8dd8-4cdd-95e8-42810abde805 req-6ba1c6bb-5790-4dcf-8a90-276b3b0a1509 service nova] Releasing lock "refresh_cache-0d0d1503-5522-4c0d-9096-2f25ed0fd7df" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 700.861068] env[69784]: DEBUG oslo_concurrency.lockutils [None req-69eddb1b-c388-4ff0-ad9e-c3383319658a tempest-TenantUsagesTestJSON-1166063019 tempest-TenantUsagesTestJSON-1166063019-project-member] Acquiring 
lock "471a1543-ff95-4010-84f9-206730770b1f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 700.861471] env[69784]: DEBUG oslo_concurrency.lockutils [None req-69eddb1b-c388-4ff0-ad9e-c3383319658a tempest-TenantUsagesTestJSON-1166063019 tempest-TenantUsagesTestJSON-1166063019-project-member] Lock "471a1543-ff95-4010-84f9-206730770b1f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 701.276926] env[69784]: DEBUG oslo_concurrency.lockutils [None req-eb701734-9a98-4311-b106-287e2db4749a tempest-ImagesTestJSON-82243885 tempest-ImagesTestJSON-82243885-project-member] Acquiring lock "dc623e72-8e80-4aaa-8a0c-363481141255" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 701.276926] env[69784]: DEBUG oslo_concurrency.lockutils [None req-eb701734-9a98-4311-b106-287e2db4749a tempest-ImagesTestJSON-82243885 tempest-ImagesTestJSON-82243885-project-member] Lock "dc623e72-8e80-4aaa-8a0c-363481141255" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 701.825299] env[69784]: DEBUG oslo_concurrency.lockutils [None req-16dc8a4a-aa0b-4526-b0a6-a366ab0d0f43 tempest-ServerDiagnosticsV248Test-402064256 tempest-ServerDiagnosticsV248Test-402064256-project-member] Acquiring lock "6e54dcb2-6760-4403-8711-75bda2e053a3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 701.825299] env[69784]: DEBUG oslo_concurrency.lockutils [None req-16dc8a4a-aa0b-4526-b0a6-a366ab0d0f43 tempest-ServerDiagnosticsV248Test-402064256 tempest-ServerDiagnosticsV248Test-402064256-project-member] Lock "6e54dcb2-6760-4403-8711-75bda2e053a3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 707.820946] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8d59fb99-922f-4714-955f-8645325f2347 tempest-ServersAdminTestJSON-1315016571 tempest-ServersAdminTestJSON-1315016571-project-member] Acquiring lock "c4255a94-f498-4498-a3a3-2867b0f12936" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 707.821291] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8d59fb99-922f-4714-955f-8645325f2347 tempest-ServersAdminTestJSON-1315016571 tempest-ServersAdminTestJSON-1315016571-project-member] Lock "c4255a94-f498-4498-a3a3-2867b0f12936" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 708.877620] env[69784]: 
DEBUG oslo_concurrency.lockutils [None req-86636d14-3fa8-449f-b6c5-82710a9f174f tempest-ServersAdminTestJSON-1315016571 tempest-ServersAdminTestJSON-1315016571-project-member] Acquiring lock "dc7d9de6-30f9-4f58-9142-6d36e42a3b99" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 708.877903] env[69784]: DEBUG oslo_concurrency.lockutils [None req-86636d14-3fa8-449f-b6c5-82710a9f174f tempest-ServersAdminTestJSON-1315016571 tempest-ServersAdminTestJSON-1315016571-project-member] Lock "dc7d9de6-30f9-4f58-9142-6d36e42a3b99" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 711.123947] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e59dfa0a-9bbe-4d96-8a77-c80e2340595b tempest-ServersTestBootFromVolume-1201909614 tempest-ServersTestBootFromVolume-1201909614-project-member] Acquiring lock "54f60014-0a24-45c5-ab1e-14ab2b3fd8b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 711.124338] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e59dfa0a-9bbe-4d96-8a77-c80e2340595b tempest-ServersTestBootFromVolume-1201909614 tempest-ServersTestBootFromVolume-1201909614-project-member] Lock "54f60014-0a24-45c5-ab1e-14ab2b3fd8b6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 712.312765] env[69784]: WARNING oslo_vmware.rw_handles [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 712.312765] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 712.312765] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 712.312765] env[69784]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 712.312765] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 712.312765] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 712.312765] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 712.312765] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 712.312765] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 712.312765] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 712.312765] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 712.312765] env[69784]: ERROR oslo_vmware.rw_handles [ 712.313462] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 
tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/6a530867-f477-4778-878f-ca7fc0fef30a/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 712.314506] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 712.314740] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Copying Virtual Disk [datastore1] vmware_temp/6a530867-f477-4778-878f-ca7fc0fef30a/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] vmware_temp/6a530867-f477-4778-878f-ca7fc0fef30a/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 712.315029] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b2570704-979f-4788-9f5f-caf7a4ae77a7 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.325121] env[69784]: DEBUG oslo_vmware.api [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Waiting for the task: (returnval){ [ 712.325121] env[69784]: value = "task-3467041" [ 712.325121] env[69784]: _type = "Task" [ 712.325121] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.333400] env[69784]: DEBUG oslo_vmware.api [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Task: {'id': task-3467041, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.833925] env[69784]: DEBUG oslo_vmware.exceptions [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Fault InvalidArgument not matched. 
{{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 712.833925] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 712.836844] env[69784]: ERROR nova.compute.manager [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 712.836844] env[69784]: Faults: ['InvalidArgument'] [ 712.836844] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] Traceback (most recent call last): [ 712.836844] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 712.836844] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] yield resources [ 712.836844] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 712.836844] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] self.driver.spawn(context, instance, image_meta, [ 712.836844] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 712.836844] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] self._vmops.spawn(context, instance, image_meta, injected_files, [ 712.836844] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 712.836844] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] self._fetch_image_if_missing(context, vi) [ 712.836844] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 712.837289] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] image_cache(vi, tmp_image_ds_loc) [ 712.837289] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 712.837289] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] vm_util.copy_virtual_disk( [ 712.837289] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 712.837289] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] session._wait_for_task(vmdk_copy_task) [ 712.837289] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 712.837289] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] return self.wait_for_task(task_ref) [ 712.837289] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 712.837289] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] return evt.wait() [ 712.837289] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 712.837289] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] result = hub.switch() [ 712.837289] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 712.837289] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] return self.greenlet.switch() [ 712.837679] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 712.837679] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] self.f(*self.args, **self.kw) [ 712.837679] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 712.837679] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] raise exceptions.translate_fault(task_info.error) [ 712.837679] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 712.837679] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] Faults: ['InvalidArgument'] [ 712.837679] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] [ 712.837888] env[69784]: INFO nova.compute.manager [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Terminating instance [ 712.839834] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 712.839834] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 712.841020] env[69784]: DEBUG nova.compute.manager [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 
tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 712.841020] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 712.843805] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c671e5a-014f-42ea-ab12-596c3aa9a685 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.847531] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f095b870-f8fd-4a8a-894c-8583c5067a2f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.855731] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 712.855731] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3eb13f00-a4d3-492a-8f26-6a01170bd08c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.858392] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 712.858562] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 712.859645] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b600021d-c833-4a19-894e-8ae6fcd64a6e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.864557] env[69784]: DEBUG oslo_vmware.api [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Waiting for the task: (returnval){ [ 712.864557] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52c01409-35f9-3634-7b10-d97eff02c4f3" [ 712.864557] env[69784]: _type = "Task" [ 712.864557] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.878814] env[69784]: DEBUG oslo_vmware.api [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52c01409-35f9-3634-7b10-d97eff02c4f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.931410] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 712.931410] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 712.931410] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Deleting the datastore file [datastore1] e2045977-fc76-4694-b233-ee747d1a1837 {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 712.931410] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9ff7789b-f377-4332-8be0-f77b14a8d4c7 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.937260] env[69784]: DEBUG oslo_vmware.api [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Waiting for the task: (returnval){ [ 712.937260] env[69784]: value = "task-3467043" [ 712.937260] env[69784]: _type = "Task" [ 712.937260] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.949707] env[69784]: DEBUG oslo_vmware.api [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Task: {'id': task-3467043, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.382437] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 713.382724] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Creating directory with path [datastore1] vmware_temp/8c809af8-c6f3-4b44-916a-a9f50dbab770/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 713.384282] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-51fb879d-0002-4c25-8776-ad5b69317387 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.398318] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Created directory with path [datastore1] vmware_temp/8c809af8-c6f3-4b44-916a-a9f50dbab770/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 713.398530] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Fetch image to [datastore1] vmware_temp/8c809af8-c6f3-4b44-916a-a9f50dbab770/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 713.398694] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/8c809af8-c6f3-4b44-916a-a9f50dbab770/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 713.399521] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a4687da-069c-4aa6-8fa3-314c0de75832 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.410212] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb5a1328-f0f7-4cda-b439-90d02e213325 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.418852] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28fd4d5b-d206-4a12-9ef1-64f9ad168dd5 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.460819] env[69784]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d9a2a93-c0d7-4dc9-843a-2014fbfa92a7 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.468624] env[69784]: DEBUG oslo_vmware.api [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Task: {'id': task-3467043, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080624} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.470335] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 713.470565] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 713.470739] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 713.471156] env[69784]: INFO nova.compute.manager [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Took 0.63 seconds to destroy the instance on the hypervisor. 
[ 713.473360] env[69784]: DEBUG nova.compute.claims [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 713.473532] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 713.474078] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 713.476847] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ae121d7e-ad7e-40b6-a5ca-e79619b4f883 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.499836] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 713.577038] env[69784]: DEBUG oslo_vmware.rw_handles [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8c809af8-c6f3-4b44-916a-a9f50dbab770/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 713.642460] env[69784]: DEBUG oslo_vmware.rw_handles [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Completed reading data from the image iterator. {{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 713.642774] env[69784]: DEBUG oslo_vmware.rw_handles [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8c809af8-c6f3-4b44-916a-a9f50dbab770/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 714.028140] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e8f0595-335d-4bf6-a195-b3ab3d790300 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.033851] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2be4d60-e070-40ce-a361-105a43f54c24 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.068389] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bb691f6-6eac-4154-8d0a-058b1456bd61 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.076309] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da8807e1-6e73-43ac-9bb0-2dd97a2ff7e5 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.093134] env[69784]: DEBUG nova.compute.provider_tree [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 714.113039] env[69784]: DEBUG nova.scheduler.client.report [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 714.126618] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.653s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 714.127518] env[69784]: ERROR nova.compute.manager [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 714.127518] env[69784]: Faults: ['InvalidArgument'] [ 714.127518] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] Traceback (most recent call last): [ 714.127518] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 714.127518] env[69784]: 
ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] self.driver.spawn(context, instance, image_meta, [ 714.127518] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 714.127518] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] self._vmops.spawn(context, instance, image_meta, injected_files, [ 714.127518] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 714.127518] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] self._fetch_image_if_missing(context, vi) [ 714.127518] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 714.127518] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] image_cache(vi, tmp_image_ds_loc) [ 714.127518] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 714.127903] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] vm_util.copy_virtual_disk( [ 714.127903] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 714.127903] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] session._wait_for_task(vmdk_copy_task) [ 714.127903] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 714.127903] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] return self.wait_for_task(task_ref) [ 714.127903] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 714.127903] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] return evt.wait() [ 714.127903] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 714.127903] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] result = hub.switch() [ 714.127903] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 714.127903] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] return self.greenlet.switch() [ 714.127903] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 714.127903] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] self.f(*self.args, **self.kw) [ 714.128367] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 714.128367] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] raise exceptions.translate_fault(task_info.error) [ 714.128367] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 714.128367] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] Faults: ['InvalidArgument'] [ 714.128367] env[69784]: ERROR nova.compute.manager [instance: e2045977-fc76-4694-b233-ee747d1a1837] [ 714.128367] env[69784]: DEBUG nova.compute.utils [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 714.135896] env[69784]: DEBUG nova.compute.manager [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Build of instance e2045977-fc76-4694-b233-ee747d1a1837 was re-scheduled: A specified parameter was not correct: fileType [ 714.135896] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 714.136348] env[69784]: DEBUG nova.compute.manager [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 714.136595] env[69784]: DEBUG nova.compute.manager [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 714.136771] env[69784]: DEBUG nova.compute.manager [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 714.136937] env[69784]: DEBUG nova.network.neutron [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 714.722763] env[69784]: DEBUG nova.network.neutron [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.744381] env[69784]: INFO nova.compute.manager [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] [instance: e2045977-fc76-4694-b233-ee747d1a1837] Took 0.61 seconds to deallocate network for instance. [ 714.875170] env[69784]: INFO nova.scheduler.client.report [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Deleted allocations for instance e2045977-fc76-4694-b233-ee747d1a1837 [ 714.904031] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8d297efe-a170-42df-8423-7c81fb32ee8c tempest-ServersTestFqdnHostnames-2086658528 tempest-ServersTestFqdnHostnames-2086658528-project-member] Lock "e2045977-fc76-4694-b233-ee747d1a1837" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.136s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 714.904277] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "e2045977-fc76-4694-b233-ee747d1a1837" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 42.004s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 714.904958] env[69784]: INFO nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: e2045977-fc76-4694-b233-ee747d1a1837] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 714.905278] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "e2045977-fc76-4694-b233-ee747d1a1837" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 714.929634] env[69784]: DEBUG nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 714.992243] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 714.992757] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 714.994332] env[69784]: INFO nova.compute.claims [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 715.447981] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf36e5a-9844-4b6f-8800-d91d9621c358 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.456325] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18c8a934-48ed-4acf-ad3e-ec0f169b6e41 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.488118] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e31ecdde-189c-4bf2-b9f3-bdebfd8c8e93 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.495669] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-125071ba-5a02-4a74-b615-5c5c6968dd0b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.509748] env[69784]: DEBUG nova.compute.provider_tree [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 715.518922] env[69784]: DEBUG nova.scheduler.client.report [None req-46ce3f76-8385-4320-af51-d11731c3933a 
tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 715.533363] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.541s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 715.533852] env[69784]: DEBUG nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Start building networks asynchronously for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 715.575687] env[69784]: DEBUG nova.compute.utils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 715.580022] env[69784]: DEBUG nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 715.580022] env[69784]: DEBUG nova.network.neutron [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 715.588428] env[69784]: DEBUG nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 715.663030] env[69784]: DEBUG nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Start spawning the instance on the hypervisor. 
{{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 715.699029] env[69784]: DEBUG nova.virt.hardware [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 715.699029] env[69784]: DEBUG nova.virt.hardware [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 715.699029] env[69784]: DEBUG nova.virt.hardware [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 715.699318] env[69784]: DEBUG nova.virt.hardware [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 715.699318] env[69784]: DEBUG nova.virt.hardware [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 715.699318] env[69784]: DEBUG nova.virt.hardware [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 715.699508] env[69784]: DEBUG nova.virt.hardware [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 715.699824] env[69784]: DEBUG nova.virt.hardware [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 715.700155] env[69784]: DEBUG nova.virt.hardware [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 715.700478] env[69784]: DEBUG nova.virt.hardware [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 715.700797] env[69784]: DEBUG nova.virt.hardware [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 715.703017] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f3a3ee7-e9c2-4e67-8441-362e65dfdf31 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.716016] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d682ca42-635f-48e0-a0ff-4a4b0cb67bfe {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.719744] env[69784]: DEBUG nova.policy [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '83eed4c4733d4a15803de5acdbc3a679', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a661683342d34a0eb49a0fa3e979028b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 717.081194] env[69784]: DEBUG nova.network.neutron [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Successfully created port: 5d98783f-ca95-43a7-a2c7-5fa7a29fa36c {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 718.429867] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Acquiring lock "16edc743-b24c-4a20-9046-f5d519bd7e9a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 718.430182] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Lock "16edc743-b24c-4a20-9046-f5d519bd7e9a" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 719.780699] env[69784]: DEBUG nova.network.neutron [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Successfully updated port: 5d98783f-ca95-43a7-a2c7-5fa7a29fa36c {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 719.791686] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquiring lock "refresh_cache-65601835-8d30-46b8-b928-b3912d058c6e" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 719.791777] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquired lock "refresh_cache-65601835-8d30-46b8-b928-b3912d058c6e" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 719.792379] env[69784]: DEBUG nova.network.neutron [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 719.898858] env[69784]: DEBUG nova.network.neutron [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Instance cache missing network info. 
{{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 720.321536] env[69784]: DEBUG nova.network.neutron [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Updating instance_info_cache with network_info: [{"id": "5d98783f-ca95-43a7-a2c7-5fa7a29fa36c", "address": "fa:16:3e:6f:7e:7c", "network": {"id": "f48f61be-e364-4493-9d88-88e91dc124a7", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1610168610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a661683342d34a0eb49a0fa3e979028b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba27300-88df-4c95-b9e0-a4a8b5039c3c", "external-id": "nsx-vlan-transportzone-681", "segmentation_id": 681, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d98783f-ca", "ovs_interfaceid": "5d98783f-ca95-43a7-a2c7-5fa7a29fa36c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.349725] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Releasing lock "refresh_cache-65601835-8d30-46b8-b928-b3912d058c6e" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 720.350296] env[69784]: DEBUG nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Instance network_info: |[{"id": "5d98783f-ca95-43a7-a2c7-5fa7a29fa36c", "address": "fa:16:3e:6f:7e:7c", "network": {"id": "f48f61be-e364-4493-9d88-88e91dc124a7", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1610168610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a661683342d34a0eb49a0fa3e979028b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba27300-88df-4c95-b9e0-a4a8b5039c3c", "external-id": "nsx-vlan-transportzone-681", "segmentation_id": 681, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d98783f-ca", "ovs_interfaceid": "5d98783f-ca95-43a7-a2c7-5fa7a29fa36c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 720.350866] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:7e:7c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5ba27300-88df-4c95-b9e0-a4a8b5039c3c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5d98783f-ca95-43a7-a2c7-5fa7a29fa36c', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 720.363093] env[69784]: DEBUG oslo.service.loopingcall [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 720.363343] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 720.363668] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8c0ca973-2d8d-4f51-b982-a4bb688c220c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.387542] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 720.387542] env[69784]: value = "task-3467044" [ 720.387542] env[69784]: _type = "Task" [ 720.387542] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.396595] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467044, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.400964] env[69784]: DEBUG oslo_concurrency.lockutils [None req-697d64b1-aac9-45ce-b259-3ea401a64f2f tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Acquiring lock "ecb67581-1c86-4bff-a063-8433329914c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 720.400964] env[69784]: DEBUG oslo_concurrency.lockutils [None req-697d64b1-aac9-45ce-b259-3ea401a64f2f tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Lock "ecb67581-1c86-4bff-a063-8433329914c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 720.521874] env[69784]: DEBUG nova.compute.manager [req-80a76452-980e-4e07-accf-016a5d746671 req-53c3026f-638c-4133-8902-ddd92ef64910 service nova] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Received event network-vif-plugged-5d98783f-ca95-43a7-a2c7-5fa7a29fa36c {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 720.522531] env[69784]: DEBUG oslo_concurrency.lockutils [req-80a76452-980e-4e07-accf-016a5d746671 req-53c3026f-638c-4133-8902-ddd92ef64910 service nova] Acquiring lock "65601835-8d30-46b8-b928-b3912d058c6e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 720.522531] env[69784]: DEBUG oslo_concurrency.lockutils [req-80a76452-980e-4e07-accf-016a5d746671 req-53c3026f-638c-4133-8902-ddd92ef64910 service nova] Lock "65601835-8d30-46b8-b928-b3912d058c6e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 720.522531] env[69784]: DEBUG oslo_concurrency.lockutils [req-80a76452-980e-4e07-accf-016a5d746671 req-53c3026f-638c-4133-8902-ddd92ef64910 service nova] Lock "65601835-8d30-46b8-b928-b3912d058c6e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 720.522784] env[69784]: DEBUG nova.compute.manager [req-80a76452-980e-4e07-accf-016a5d746671 req-53c3026f-638c-4133-8902-ddd92ef64910 service nova] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] No waiting events found dispatching network-vif-plugged-5d98783f-ca95-43a7-a2c7-5fa7a29fa36c {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 720.522784] env[69784]: WARNING nova.compute.manager [req-80a76452-980e-4e07-accf-016a5d746671 req-53c3026f-638c-4133-8902-ddd92ef64910 service nova] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Received unexpected event network-vif-plugged-5d98783f-ca95-43a7-a2c7-5fa7a29fa36c for instance with vm_state building and task_state spawning. [ 720.898975] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467044, 'name': CreateVM_Task, 'duration_secs': 0.345738} completed successfully. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.900129] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 720.900945] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 720.901068] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 720.901467] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 720.901608] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb6e6ae3-6872-4828-9f3c-415cf35282a4 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.908106] env[69784]: DEBUG oslo_vmware.api [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Waiting for the task: (returnval){ [ 720.908106] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52b7f573-aa1c-c9cf-c95d-310eb36e7f70" [ 720.908106] env[69784]: _type = "Task" [ 720.908106] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.916813] env[69784]: DEBUG oslo_vmware.api [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52b7f573-aa1c-c9cf-c95d-310eb36e7f70, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.164692] env[69784]: DEBUG oslo_concurrency.lockutils [None req-575ea7ba-a31c-4a97-9a96-d556dc8e6904 tempest-ImagesNegativeTestJSON-791461209 tempest-ImagesNegativeTestJSON-791461209-project-member] Acquiring lock "1fd4b343-4e5e-42c3-aa86-33c9866c5f6e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 721.164937] env[69784]: DEBUG oslo_concurrency.lockutils [None req-575ea7ba-a31c-4a97-9a96-d556dc8e6904 tempest-ImagesNegativeTestJSON-791461209 tempest-ImagesNegativeTestJSON-791461209-project-member] Lock "1fd4b343-4e5e-42c3-aa86-33c9866c5f6e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 721.419573] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 721.419573] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 721.419573] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 722.432754] env[69784]: DEBUG oslo_concurrency.lockutils [None req-40a47e74-c244-4a14-9872-de656504d968 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Acquiring lock "d6a9f02a-3d21-4adc-b598-b0586d7e54de" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 722.433176] env[69784]: DEBUG oslo_concurrency.lockutils [None req-40a47e74-c244-4a14-9872-de656504d968 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Lock "d6a9f02a-3d21-4adc-b598-b0586d7e54de" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 722.553384] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c1f4ead0-b262-4697-9124-e25f77a07e87 tempest-ImagesOneServerTestJSON-1424307921 tempest-ImagesOneServerTestJSON-1424307921-project-member] Acquiring lock "c17163a5-f93c-4899-943a-59f3862eee07" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 722.553834] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c1f4ead0-b262-4697-9124-e25f77a07e87 tempest-ImagesOneServerTestJSON-1424307921 tempest-ImagesOneServerTestJSON-1424307921-project-member] Lock "c17163a5-f93c-4899-943a-59f3862eee07" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 723.106130] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d488b7f5-41c4-4f0d-9286-b9f4e39a5134 tempest-FloatingIPsAssociationNegativeTestJSON-139635208 tempest-FloatingIPsAssociationNegativeTestJSON-139635208-project-member] Acquiring lock "88d2f936-18bb-4b40-8c72-fb6372c447c5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 723.106367] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d488b7f5-41c4-4f0d-9286-b9f4e39a5134 tempest-FloatingIPsAssociationNegativeTestJSON-139635208 tempest-FloatingIPsAssociationNegativeTestJSON-139635208-project-member] Lock "88d2f936-18bb-4b40-8c72-fb6372c447c5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 723.983296] env[69784]: DEBUG nova.compute.manager [req-81ca0322-185a-4a96-8290-acd51ca28132 req-d9951fc0-ad44-4279-b8d5-90a0bb349e59 service nova] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Received event network-changed-5d98783f-ca95-43a7-a2c7-5fa7a29fa36c {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 723.983794] env[69784]: DEBUG nova.compute.manager [req-81ca0322-185a-4a96-8290-acd51ca28132 req-d9951fc0-ad44-4279-b8d5-90a0bb349e59 service nova] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Refreshing instance network info cache due to event network-changed-5d98783f-ca95-43a7-a2c7-5fa7a29fa36c. 
{{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 723.983794] env[69784]: DEBUG oslo_concurrency.lockutils [req-81ca0322-185a-4a96-8290-acd51ca28132 req-d9951fc0-ad44-4279-b8d5-90a0bb349e59 service nova] Acquiring lock "refresh_cache-65601835-8d30-46b8-b928-b3912d058c6e" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 723.983946] env[69784]: DEBUG oslo_concurrency.lockutils [req-81ca0322-185a-4a96-8290-acd51ca28132 req-d9951fc0-ad44-4279-b8d5-90a0bb349e59 service nova] Acquired lock "refresh_cache-65601835-8d30-46b8-b928-b3912d058c6e" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 723.983993] env[69784]: DEBUG nova.network.neutron [req-81ca0322-185a-4a96-8290-acd51ca28132 req-d9951fc0-ad44-4279-b8d5-90a0bb349e59 service nova] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Refreshing network info cache for port 5d98783f-ca95-43a7-a2c7-5fa7a29fa36c {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 724.488581] env[69784]: DEBUG nova.network.neutron [req-81ca0322-185a-4a96-8290-acd51ca28132 req-d9951fc0-ad44-4279-b8d5-90a0bb349e59 service nova] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Updated VIF entry in instance network info cache for port 5d98783f-ca95-43a7-a2c7-5fa7a29fa36c. {{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 724.488918] env[69784]: DEBUG nova.network.neutron [req-81ca0322-185a-4a96-8290-acd51ca28132 req-d9951fc0-ad44-4279-b8d5-90a0bb349e59 service nova] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Updating instance_info_cache with network_info: [{"id": "5d98783f-ca95-43a7-a2c7-5fa7a29fa36c", "address": "fa:16:3e:6f:7e:7c", "network": {"id": "f48f61be-e364-4493-9d88-88e91dc124a7", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1610168610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a661683342d34a0eb49a0fa3e979028b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba27300-88df-4c95-b9e0-a4a8b5039c3c", "external-id": "nsx-vlan-transportzone-681", "segmentation_id": 681, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d98783f-ca", "ovs_interfaceid": "5d98783f-ca95-43a7-a2c7-5fa7a29fa36c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.500762] env[69784]: DEBUG oslo_concurrency.lockutils [req-81ca0322-185a-4a96-8290-acd51ca28132 req-d9951fc0-ad44-4279-b8d5-90a0bb349e59 service nova] Releasing lock "refresh_cache-65601835-8d30-46b8-b928-b3912d058c6e" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 726.522393] env[69784]: DEBUG oslo_concurrency.lockutils [None req-9827a9be-e086-422a-bc2e-dea7c596f2ea tempest-ServerShowV247Test-1917301733 tempest-ServerShowV247Test-1917301733-project-member] Acquiring lock 
"050e4912-aa96-43d9-8d5e-6db9b4e35961" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 726.522641] env[69784]: DEBUG oslo_concurrency.lockutils [None req-9827a9be-e086-422a-bc2e-dea7c596f2ea tempest-ServerShowV247Test-1917301733 tempest-ServerShowV247Test-1917301733-project-member] Lock "050e4912-aa96-43d9-8d5e-6db9b4e35961" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 727.493995] env[69784]: DEBUG oslo_concurrency.lockutils [None req-24514a99-1f20-4fff-8d78-b90d61887ec8 tempest-ServerShowV247Test-1917301733 tempest-ServerShowV247Test-1917301733-project-member] Acquiring lock "f0fd2350-cdac-4782-80c6-97c022f26711" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 727.494251] env[69784]: DEBUG oslo_concurrency.lockutils [None req-24514a99-1f20-4fff-8d78-b90d61887ec8 tempest-ServerShowV247Test-1917301733 tempest-ServerShowV247Test-1917301733-project-member] Lock "f0fd2350-cdac-4782-80c6-97c022f26711" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 729.530591] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 729.563813] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 729.564010] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 729.840716] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 729.840716] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 729.840716] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 729.840716] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] 
Running periodic task ComputeManager._instance_usage_audit {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 730.839652] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 730.839938] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Starting heal instance info cache {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 730.839938] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Rebuilding the list of instances to heal {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 730.862432] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 730.862602] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 730.862759] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 730.863016] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 730.863169] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 730.863294] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 730.863527] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 730.863527] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Skipping network cache update for instance because it is Building. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 730.863659] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 730.863755] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 730.863876] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Didn't find any instances for network info cache update. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 730.864432] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 730.864578] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69784) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 730.864741] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 730.879538] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 730.879766] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 730.879929] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 730.880113] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69784) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 730.881586] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9368a6d0-b14b-4738-b420-4ef1c25855e8 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.890384] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-8a8f9930-a0fe-4fc2-842e-544d5ae52d0c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.906723] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c2f5498-1702-488a-b420-0922defdf799 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.913608] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ef4d5fd-27a8-43f7-b7d9-f7e1d2680172 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.952565] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180922MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=69784) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 730.952721] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 730.952918] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 731.045472] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c16f7920-23d4-4d77-b70f-118887cc9ff7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.058954] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 2a8374bd-e901-4b1e-b9ee-e599bd8efed5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 731.086171] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 050e4912-aa96-43d9-8d5e-6db9b4e35961 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 731.097506] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance f0fd2350-cdac-4782-80c6-97c022f26711 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 731.109397] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 54f60014-0a24-45c5-ab1e-14ab2b3fd8b6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 731.109580] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 150d9f3b-b338-4810-ad32-7c8609131ce4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.122511] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ecec531e-41d9-47e3-b447-bc658edaea69 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 731.136905] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance dc623e72-8e80-4aaa-8a0c-363481141255 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 731.137080] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 15e3e0f5-1967-4f7d-b45f-954845c3dc75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.150080] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c68ee659-716d-47cc-a6a1-d4c18fa5664f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 731.164538] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 6e54dcb2-6760-4403-8711-75bda2e053a3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 731.172954] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance f27a066f-307a-4213-b4d3-a861068f4867 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 731.173750] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance a927e0ff-6c39-47fd-a082-88d41eb54015 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.184759] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 16edc743-b24c-4a20-9046-f5d519bd7e9a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 731.197925] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance d6a9f02a-3d21-4adc-b598-b0586d7e54de has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 731.209193] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ecb67581-1c86-4bff-a063-8433329914c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 731.222623] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 14daabdf-7839-4dfb-bbc9-f4ea90e8db8e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 731.235381] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c4255a94-f498-4498-a3a3-2867b0f12936 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 731.253143] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance dc7d9de6-30f9-4f58-9142-6d36e42a3b99 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 731.268579] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 88d2f936-18bb-4b40-8c72-fb6372c447c5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 731.338771] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 9b67f788-cf36-4bb3-bdc0-575d2a2178ca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 731.351124] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c17163a5-f93c-4899-943a-59f3862eee07 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 731.351124] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 54d12624-7c76-433d-8f1f-3f9bb451e451 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.351124] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 0d0d1503-5522-4c0d-9096-2f25ed0fd7df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.351124] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 65601835-8d30-46b8-b928-b3912d058c6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.351533] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c43ca674-06b8-4b5d-a709-2df095b509f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.363799] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 1fd4b343-4e5e-42c3-aa86-33c9866c5f6e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 731.371361] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 28e32097-d536-442f-bcb4-f93b64cb64e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.378690] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 471a1543-ff95-4010-84f9-206730770b1f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 731.424661] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 9369b20b-7027-47de-8495-a503ddfb69bd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 731.424847] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance cec4f0d1-1b7d-4189-aadf-6f801f52d9bb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.425126] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 731.425315] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 731.914715] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c2079e9-02f6-43ab-8037-7400cb73d80a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.922989] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-378d1c24-0476-44a0-9c78-31d1dd98c6cd {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.954720] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-371133b2-c82b-4e7e-a266-1e4d8f8bb1d6 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.962202] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1aedb11-315e-4154-994e-0bd902309ae8 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.976136] env[69784]: DEBUG nova.compute.provider_tree [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 731.986063] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 732.002218] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69784) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 732.004706] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.049s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 735.808146] env[69784]: DEBUG oslo_concurrency.lockutils [None 
req-65fdea04-ac28-4259-ab66-ef5a943fa4d8 tempest-MultipleCreateTestJSON-1744311742 tempest-MultipleCreateTestJSON-1744311742-project-member] Acquiring lock "d185e137-119c-4611-9749-00fe4a6bb2c8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 735.808146] env[69784]: DEBUG oslo_concurrency.lockutils [None req-65fdea04-ac28-4259-ab66-ef5a943fa4d8 tempest-MultipleCreateTestJSON-1744311742 tempest-MultipleCreateTestJSON-1744311742-project-member] Lock "d185e137-119c-4611-9749-00fe4a6bb2c8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 735.838332] env[69784]: DEBUG oslo_concurrency.lockutils [None req-65fdea04-ac28-4259-ab66-ef5a943fa4d8 tempest-MultipleCreateTestJSON-1744311742 tempest-MultipleCreateTestJSON-1744311742-project-member] Acquiring lock "cf2e6054-e94c-48ab-9142-34475d17d2f1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 735.838582] env[69784]: DEBUG oslo_concurrency.lockutils [None req-65fdea04-ac28-4259-ab66-ef5a943fa4d8 tempest-MultipleCreateTestJSON-1744311742 tempest-MultipleCreateTestJSON-1744311742-project-member] Lock "cf2e6054-e94c-48ab-9142-34475d17d2f1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 738.648533] env[69784]: DEBUG oslo_concurrency.lockutils [None req-171457b6-e761-4b59-99df-1aecbac20367 tempest-AttachInterfacesTestJSON-1468351673 tempest-AttachInterfacesTestJSON-1468351673-project-member] Acquiring lock "c1f8b9b1-de4f-47b2-8465-81b7c7139b38" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 738.648872] env[69784]: DEBUG oslo_concurrency.lockutils [None req-171457b6-e761-4b59-99df-1aecbac20367 tempest-AttachInterfacesTestJSON-1468351673 tempest-AttachInterfacesTestJSON-1468351673-project-member] Lock "c1f8b9b1-de4f-47b2-8465-81b7c7139b38" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 745.126432] env[69784]: DEBUG oslo_concurrency.lockutils [None req-112ae074-c27e-46b5-9669-606161082e62 tempest-ServerActionsV293TestJSON-1682501985 tempest-ServerActionsV293TestJSON-1682501985-project-member] Acquiring lock "b5ebb553-9d00-4791-b563-8ddaa325dc88" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 745.126766] env[69784]: DEBUG oslo_concurrency.lockutils [None req-112ae074-c27e-46b5-9669-606161082e62 tempest-ServerActionsV293TestJSON-1682501985 tempest-ServerActionsV293TestJSON-1682501985-project-member] Lock "b5ebb553-9d00-4791-b563-8ddaa325dc88" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 762.330805] env[69784]: WARNING oslo_vmware.rw_handles [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 762.330805] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 762.330805] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 762.330805] env[69784]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 762.330805] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 762.330805] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 762.330805] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 762.330805] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 762.330805] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 762.330805] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 762.330805] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 762.330805] env[69784]: ERROR oslo_vmware.rw_handles [ 762.331308] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/8c809af8-c6f3-4b44-916a-a9f50dbab770/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 762.332698] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 762.332926] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Copying Virtual Disk [datastore1] vmware_temp/8c809af8-c6f3-4b44-916a-a9f50dbab770/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] vmware_temp/8c809af8-c6f3-4b44-916a-a9f50dbab770/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 762.333228] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8c7b2b9c-e9e6-43c5-b47f-fd8260f5f83e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.342561] env[69784]: DEBUG 
oslo_vmware.api [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Waiting for the task: (returnval){ [ 762.342561] env[69784]: value = "task-3467055" [ 762.342561] env[69784]: _type = "Task" [ 762.342561] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.351069] env[69784]: DEBUG oslo_vmware.api [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Task: {'id': task-3467055, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.853397] env[69784]: DEBUG oslo_vmware.exceptions [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Fault InvalidArgument not matched. {{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 762.853705] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 762.854342] env[69784]: ERROR nova.compute.manager [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 762.854342] env[69784]: Faults: ['InvalidArgument'] [ 762.854342] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Traceback (most recent call last): [ 762.854342] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 762.854342] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] yield resources [ 762.854342] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 762.854342] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] self.driver.spawn(context, instance, image_meta, [ 762.854342] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 762.854342] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] self._vmops.spawn(context, instance, image_meta, injected_files, [ 762.854342] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 762.854342] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] self._fetch_image_if_missing(context, vi) [ 762.854342] env[69784]: ERROR 
nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 762.854670] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] image_cache(vi, tmp_image_ds_loc) [ 762.854670] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 762.854670] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] vm_util.copy_virtual_disk( [ 762.854670] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 762.854670] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] session._wait_for_task(vmdk_copy_task) [ 762.854670] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 762.854670] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] return self.wait_for_task(task_ref) [ 762.854670] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 762.854670] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] return evt.wait() [ 762.854670] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 762.854670] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] result = hub.switch() [ 762.854670] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 762.854670] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] return self.greenlet.switch() [ 762.854997] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 762.854997] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] self.f(*self.args, **self.kw) [ 762.854997] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 762.854997] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] raise exceptions.translate_fault(task_info.error) [ 762.854997] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 762.854997] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Faults: ['InvalidArgument'] [ 762.854997] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] [ 762.854997] env[69784]: INFO nova.compute.manager [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 
tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Terminating instance [ 762.856419] env[69784]: DEBUG oslo_concurrency.lockutils [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 762.856562] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 762.857154] env[69784]: DEBUG nova.compute.manager [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 762.857323] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 762.857508] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ddecd7ef-457f-4811-9463-83dc570db4de {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.860067] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6d5b507-a101-41ef-90db-7fa2e3d300e6 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.866873] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 762.867107] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2519c400-3e35-46fc-8092-c56adfccda6c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.869301] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 762.869471] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 762.870403] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c4484ae-6602-43fd-b176-df4340f514f5 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.875401] env[69784]: DEBUG oslo_vmware.api [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Waiting for the task: (returnval){ [ 762.875401] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]521810b0-006d-a40a-6867-f51145c34429" [ 762.875401] env[69784]: _type = "Task" [ 762.875401] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.886838] env[69784]: DEBUG oslo_vmware.api [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]521810b0-006d-a40a-6867-f51145c34429, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.933067] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 762.933067] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 762.933067] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Deleting the datastore file [datastore1] 54d12624-7c76-433d-8f1f-3f9bb451e451 {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 762.933067] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9d747b3c-9e3e-41d6-aa65-6cef7a113dd4 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.939354] env[69784]: DEBUG oslo_vmware.api [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Waiting for the task: (returnval){ [ 762.939354] env[69784]: value = "task-3467057" [ 762.939354] env[69784]: _type = "Task" [ 762.939354] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.948373] env[69784]: DEBUG oslo_vmware.api [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Task: {'id': task-3467057, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.386596] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 763.386858] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Creating directory with path [datastore1] vmware_temp/7c1d3539-6312-48f0-b2e5-d763a404c2b6/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 763.387105] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-33f61e80-1348-4d85-8110-fb88c128309b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.399012] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Created directory with path [datastore1] vmware_temp/7c1d3539-6312-48f0-b2e5-d763a404c2b6/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 763.399217] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Fetch image to [datastore1] vmware_temp/7c1d3539-6312-48f0-b2e5-d763a404c2b6/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 763.399385] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/7c1d3539-6312-48f0-b2e5-d763a404c2b6/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 763.400142] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad2d8a69-328e-44bb-aede-bf8c6ac9e63f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.406729] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e261c88-d631-4d00-9a16-2da76949612d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.415806] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-815ea1b5-fcfa-431c-9a26-7f029da98c76 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.449481] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-e139190f-8bfd-4f74-9d55-d2c9868d7aaf {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.456553] env[69784]: DEBUG oslo_vmware.api [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Task: {'id': task-3467057, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.064202} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.458094] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 763.458332] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 763.458549] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 763.458858] env[69784]: INFO nova.compute.manager [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Took 0.60 seconds to destroy the instance on the hypervisor. 
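The entries above show the driver submitting SearchDatastore_Task and DeleteDatastoreFile_Task and then polling them until completion ("progress is 0%" followed by "completed successfully" with a duration_secs). Below is a minimal illustrative sketch of that poll-until-done pattern, not oslo.vmware's actual implementation; the get_task_info callback and TaskState names are hypothetical stand-ins.

import time

class TaskState:
    RUNNING = "running"
    SUCCESS = "success"
    ERROR = "error"

def wait_for_task(get_task_info, task_id, interval=0.5, timeout=300.0):
    # Poll a task until it finishes, mirroring the "progress is N%" entries above.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)  # e.g. {"state": "running", "progress": 40}
        if info["state"] == TaskState.SUCCESS:
            return info                # caller can then log "completed successfully"
        if info["state"] == TaskState.ERROR:
            raise RuntimeError(info.get("error", "task failed"))
        time.sleep(interval)           # back off before the next poll
    raise TimeoutError("task %s did not complete within %ss" % (task_id, timeout))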
[ 763.461629] env[69784]: DEBUG nova.compute.claims [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 763.461828] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 763.462057] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 763.464704] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-dfacf908-9fb5-4802-b590-fd3609b81e9a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.488051] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 763.544981] env[69784]: DEBUG oslo_vmware.rw_handles [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7c1d3539-6312-48f0-b2e5-d763a404c2b6/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 763.604120] env[69784]: DEBUG oslo_vmware.rw_handles [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Completed reading data from the image iterator. {{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 763.604120] env[69784]: DEBUG oslo_vmware.rw_handles [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7c1d3539-6312-48f0-b2e5-d763a404c2b6/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 763.973885] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9199cbac-1743-437e-b0a1-f3cf5b90dec8 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.984021] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6293c03e-d902-45fa-b6db-9e71c5190fba {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.011386] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05dfc079-e958-4d67-a156-f9ed87e029e3 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.018678] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4643bab-79ce-4b1d-bb41-2d149c696dcb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.031525] env[69784]: DEBUG nova.compute.provider_tree [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 764.041758] env[69784]: DEBUG nova.scheduler.client.report [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 764.055987] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.594s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 764.056550] env[69784]: ERROR nova.compute.manager [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 764.056550] env[69784]: Faults: ['InvalidArgument'] [ 764.056550] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Traceback (most recent call last): [ 764.056550] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in 
_build_and_run_instance [ 764.056550] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] self.driver.spawn(context, instance, image_meta, [ 764.056550] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 764.056550] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] self._vmops.spawn(context, instance, image_meta, injected_files, [ 764.056550] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 764.056550] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] self._fetch_image_if_missing(context, vi) [ 764.056550] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 764.056550] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] image_cache(vi, tmp_image_ds_loc) [ 764.056550] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 764.056885] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] vm_util.copy_virtual_disk( [ 764.056885] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 764.056885] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] session._wait_for_task(vmdk_copy_task) [ 764.056885] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 764.056885] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] return self.wait_for_task(task_ref) [ 764.056885] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 764.056885] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] return evt.wait() [ 764.056885] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 764.056885] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] result = hub.switch() [ 764.056885] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 764.056885] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] return self.greenlet.switch() [ 764.056885] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 764.056885] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] self.f(*self.args, **self.kw) [ 764.057226] env[69784]: ERROR nova.compute.manager [instance: 
54d12624-7c76-433d-8f1f-3f9bb451e451] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 764.057226] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] raise exceptions.translate_fault(task_info.error) [ 764.057226] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 764.057226] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Faults: ['InvalidArgument'] [ 764.057226] env[69784]: ERROR nova.compute.manager [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] [ 764.057356] env[69784]: DEBUG nova.compute.utils [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 764.058600] env[69784]: DEBUG nova.compute.manager [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Build of instance 54d12624-7c76-433d-8f1f-3f9bb451e451 was re-scheduled: A specified parameter was not correct: fileType [ 764.058600] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 764.058974] env[69784]: DEBUG nova.compute.manager [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 764.059166] env[69784]: DEBUG nova.compute.manager [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 764.059337] env[69784]: DEBUG nova.compute.manager [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 764.059499] env[69784]: DEBUG nova.network.neutron [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 764.573915] env[69784]: DEBUG nova.network.neutron [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.586550] env[69784]: INFO nova.compute.manager [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] Took 0.53 seconds to deallocate network for instance. [ 764.708176] env[69784]: INFO nova.scheduler.client.report [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Deleted allocations for instance 54d12624-7c76-433d-8f1f-3f9bb451e451 [ 764.730135] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7dc375ab-3c00-46b7-bf5f-96201f7ec0fd tempest-FloatingIPsAssociationTestJSON-692328308 tempest-FloatingIPsAssociationTestJSON-692328308-project-member] Lock "54d12624-7c76-433d-8f1f-3f9bb451e451" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 107.716s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 764.731418] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "54d12624-7c76-433d-8f1f-3f9bb451e451" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 91.831s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 764.731644] env[69784]: INFO nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 54d12624-7c76-433d-8f1f-3f9bb451e451] During sync_power_state the instance has a pending task (spawning). Skip. 
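The sequence above records the failure path after the VimFaultException: the resource claim is aborted, the build is re-scheduled, networking is deallocated, and the placement allocations for the instance are deleted. A compact illustrative sketch of that cleanup ordering follows; every helper name here is a hypothetical stand-in rather than Nova's real method set.

def handle_failed_build(instance, claim, network_api, placement, reschedule):
    # Order mirrors the log: give claimed resources back, then undo external side effects.
    claim.abort()                                    # return CPU/RAM/disk to the resource tracker
    network_api.deallocate_for_instance(instance)    # drop any networking set up for the build
    placement.delete_allocations(instance.uuid)      # free the scheduler's allocation record
    return "RESCHEDULED" if reschedule else "ERROR"  # re-scheduling lets another attempt run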
[ 764.731855] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "54d12624-7c76-433d-8f1f-3f9bb451e451" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 764.745346] env[69784]: DEBUG nova.compute.manager [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 764.797326] env[69784]: DEBUG oslo_concurrency.lockutils [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 764.797687] env[69784]: DEBUG oslo_concurrency.lockutils [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 764.799220] env[69784]: INFO nova.compute.claims [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 765.238223] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fffc9a7d-1ec1-4d83-8c08-cf15b0625378 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.245908] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9ac8939-eb45-452c-afda-6c34f71303d6 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.274379] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f5ed2f5-9621-4a02-800b-eeafed91f525 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.281603] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-419ab5de-6477-46c7-9ad7-5e5af9caa127 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.294506] env[69784]: DEBUG nova.compute.provider_tree [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 765.303648] env[69784]: DEBUG nova.scheduler.client.report [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Inventory 
has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 765.316194] env[69784]: DEBUG oslo_concurrency.lockutils [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.519s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 765.316725] env[69784]: DEBUG nova.compute.manager [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Start building networks asynchronously for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 765.348971] env[69784]: DEBUG nova.compute.utils [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 765.350096] env[69784]: DEBUG nova.compute.manager [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Not allocating networking since 'none' was specified. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1952}} [ 765.357913] env[69784]: DEBUG nova.compute.manager [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 765.418926] env[69784]: DEBUG nova.compute.manager [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Start spawning the instance on the hypervisor. 
{{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 765.446432] env[69784]: DEBUG nova.virt.hardware [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 765.446780] env[69784]: DEBUG nova.virt.hardware [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 765.446976] env[69784]: DEBUG nova.virt.hardware [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 765.447206] env[69784]: DEBUG nova.virt.hardware [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 765.447376] env[69784]: DEBUG nova.virt.hardware [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 765.447713] env[69784]: DEBUG nova.virt.hardware [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 765.447788] env[69784]: DEBUG nova.virt.hardware [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 765.447951] env[69784]: DEBUG nova.virt.hardware [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 765.448175] env[69784]: DEBUG nova.virt.hardware [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 
tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 765.448378] env[69784]: DEBUG nova.virt.hardware [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 765.448586] env[69784]: DEBUG nova.virt.hardware [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 765.449534] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b79c840-7a22-4327-be9c-f78d3b3d4e5c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.458254] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f069dda-7c4f-4f14-a538-bc2f18aa5b79 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.473437] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Instance VIF info [] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 765.479451] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Creating folder: Project (e2eee50c785242ad8d384b4325b9436d). Parent ref: group-v692547. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 765.479689] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-323672f1-7a16-4a3b-8eb6-b43db9625f39 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.489814] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Created folder: Project (e2eee50c785242ad8d384b4325b9436d) in parent group-v692547. [ 765.489989] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Creating folder: Instances. Parent ref: group-v692585. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 765.490217] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d229bae7-884c-4efb-9ba9-8ce3774a9cb2 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.500373] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Created folder: Instances in parent group-v692585. 
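The hardware entries above walk through CPU topology selection for the m1.nano flavor: one vCPU with 65536-wide socket/core/thread limits yields exactly one candidate topology, 1 socket x 1 core x 1 thread. Below is an illustrative way to enumerate such topologies; the CpuTopology dataclass and possible_topologies helper are hypothetical and not nova.virt.hardware's actual code.

from dataclasses import dataclass

@dataclass(frozen=True)
class CpuTopology:
    sockets: int
    cores: int
    threads: int

def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    # Enumerate socket/core/thread splits whose product equals the vCPU count.
    for s in range(1, min(max_sockets, vcpus) + 1):
        if vcpus % s:
            continue
        for c in range(1, min(max_cores, vcpus // s) + 1):
            if (vcpus // s) % c:
                continue
            t = vcpus // (s * c)
            if t <= max_threads:
                yield CpuTopology(s, c, t)

print(list(possible_topologies(1, 65536, 65536, 65536)))
# [CpuTopology(sockets=1, cores=1, threads=1)] -- the same single topology the log records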
[ 765.500643] env[69784]: DEBUG oslo.service.loopingcall [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 765.500807] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 765.501050] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9be117b2-e07c-49ae-bf71-b98de00ebc10 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.517851] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 765.517851] env[69784]: value = "task-3467060" [ 765.517851] env[69784]: _type = "Task" [ 765.517851] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.525056] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467060, 'name': CreateVM_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.029521] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467060, 'name': CreateVM_Task, 'duration_secs': 0.292683} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.029821] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 766.030171] env[69784]: DEBUG oslo_concurrency.lockutils [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 766.030369] env[69784]: DEBUG oslo_concurrency.lockutils [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 766.030715] env[69784]: DEBUG oslo_concurrency.lockutils [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 766.031016] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37175220-49d9-4a4a-82cc-b6de1e62a60c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.035769] env[69784]: DEBUG oslo_vmware.api [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Waiting for the task: (returnval){ [ 
766.035769] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52d9d0fe-908d-e830-a0cb-2a5baadd9402" [ 766.035769] env[69784]: _type = "Task" [ 766.035769] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.043488] env[69784]: DEBUG oslo_vmware.api [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52d9d0fe-908d-e830-a0cb-2a5baadd9402, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.547552] env[69784]: DEBUG oslo_concurrency.lockutils [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 766.547807] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 766.548023] env[69784]: DEBUG oslo_concurrency.lockutils [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 769.087143] env[69784]: DEBUG oslo_concurrency.lockutils [None req-27c1342e-37fc-4ecc-bc7e-df0b017b672d tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] Acquiring lock "f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 769.087482] env[69784]: DEBUG oslo_concurrency.lockutils [None req-27c1342e-37fc-4ecc-bc7e-df0b017b672d tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] Lock "f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 790.977661] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 790.977927] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Starting heal instance info cache {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 790.977997] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Rebuilding the list 
of instances to heal {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 790.998369] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 790.998535] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 790.998671] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 790.998799] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 790.998981] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 790.999129] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 790.999253] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 790.999374] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 790.999494] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 790.999611] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 790.999734] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Didn't find any instances for network info cache update. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 791.000643] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 791.000643] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 791.000643] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 791.000738] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 791.000843] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 791.000986] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69784) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 791.001128] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 791.013112] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 791.013324] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 791.013492] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 791.013637] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69784) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 791.016033] env[69784]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1ab8ff2-19b7-414b-822a-ae67ac79ed63 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.023707] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c91e671-e7a8-409b-a0f8-5f3cb83957e1 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.037783] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c4d41d9-d31e-473b-a7bf-d91d1549cd92 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.044199] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c0d061b-8d6f-4db6-8c40-a3b9d001dc35 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.074755] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180955MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=69784) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 791.075153] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 791.075153] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 791.148582] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c16f7920-23d4-4d77-b70f-118887cc9ff7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 791.164897] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 2a8374bd-e901-4b1e-b9ee-e599bd8efed5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 791.176284] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance b5ebb553-9d00-4791-b563-8ddaa325dc88 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 791.186422] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 050e4912-aa96-43d9-8d5e-6db9b4e35961 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 791.196352] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance f0fd2350-cdac-4782-80c6-97c022f26711 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 791.208725] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance cf2e6054-e94c-48ab-9142-34475d17d2f1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 791.219451] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance d185e137-119c-4611-9749-00fe4a6bb2c8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 791.229135] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 54f60014-0a24-45c5-ab1e-14ab2b3fd8b6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 791.229300] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 150d9f3b-b338-4810-ad32-7c8609131ce4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 791.238250] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ecec531e-41d9-47e3-b447-bc658edaea69 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 791.247399] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance dc623e72-8e80-4aaa-8a0c-363481141255 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 791.247541] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 15e3e0f5-1967-4f7d-b45f-954845c3dc75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 791.256500] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c68ee659-716d-47cc-a6a1-d4c18fa5664f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 791.265479] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 6e54dcb2-6760-4403-8711-75bda2e053a3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 791.274191] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 791.283294] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance f27a066f-307a-4213-b4d3-a861068f4867 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 791.293456] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c1f8b9b1-de4f-47b2-8465-81b7c7139b38 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 791.293607] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance a927e0ff-6c39-47fd-a082-88d41eb54015 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 791.302654] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 16edc743-b24c-4a20-9046-f5d519bd7e9a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 791.311817] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance d6a9f02a-3d21-4adc-b598-b0586d7e54de has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 791.320883] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ecb67581-1c86-4bff-a063-8433329914c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 791.330548] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 14daabdf-7839-4dfb-bbc9-f4ea90e8db8e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 791.339679] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c4255a94-f498-4498-a3a3-2867b0f12936 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 791.348311] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance dc7d9de6-30f9-4f58-9142-6d36e42a3b99 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 791.358466] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 88d2f936-18bb-4b40-8c72-fb6372c447c5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 791.367618] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 9b67f788-cf36-4bb3-bdc0-575d2a2178ca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 791.376258] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c17163a5-f93c-4899-943a-59f3862eee07 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 791.376408] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 0d0d1503-5522-4c0d-9096-2f25ed0fd7df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 791.376537] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 65601835-8d30-46b8-b928-b3912d058c6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 791.376661] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c43ca674-06b8-4b5d-a709-2df095b509f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 791.386288] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 1fd4b343-4e5e-42c3-aa86-33c9866c5f6e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 791.386399] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 28e32097-d536-442f-bcb4-f93b64cb64e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 791.395392] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 471a1543-ff95-4010-84f9-206730770b1f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 791.395587] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 9369b20b-7027-47de-8495-a503ddfb69bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 791.395745] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance cec4f0d1-1b7d-4189-aadf-6f801f52d9bb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 791.396017] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 791.396216] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 791.766495] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81adea75-4704-4a5d-a661-a9dfcba85d0c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.773829] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb0368eb-ba54-47f3-90b3-1d1f3d4bdd8a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.803423] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e32c5376-53fd-4864-a947-ef611a3372de {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.810429] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23874356-9569-459f-9594-5857e84040a0 {{(pid=69784) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.823913] env[69784]: DEBUG nova.compute.provider_tree [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 791.831410] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 791.844561] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69784) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 791.844764] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.770s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 792.683944] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 792.684247] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 812.345524] env[69784]: WARNING oslo_vmware.rw_handles [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 812.345524] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 812.345524] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 812.345524] env[69784]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 812.345524] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 812.345524] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 812.345524] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 812.345524] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 812.345524] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 812.345524] env[69784]: ERROR oslo_vmware.rw_handles raise 
RemoteDisconnected("Remote end closed connection without" [ 812.345524] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 812.345524] env[69784]: ERROR oslo_vmware.rw_handles [ 812.346019] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/7c1d3539-6312-48f0-b2e5-d763a404c2b6/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 812.348030] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 812.348030] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Copying Virtual Disk [datastore1] vmware_temp/7c1d3539-6312-48f0-b2e5-d763a404c2b6/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] vmware_temp/7c1d3539-6312-48f0-b2e5-d763a404c2b6/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 812.348214] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2e0bf568-e82b-40cb-b0da-b9a64c7c0661 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.357074] env[69784]: DEBUG oslo_vmware.api [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Waiting for the task: (returnval){ [ 812.357074] env[69784]: value = "task-3467061" [ 812.357074] env[69784]: _type = "Task" [ 812.357074] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.364207] env[69784]: DEBUG oslo_vmware.api [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Task: {'id': task-3467061, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.869627] env[69784]: DEBUG oslo_vmware.exceptions [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Fault InvalidArgument not matched. 
{{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 812.869627] env[69784]: DEBUG oslo_concurrency.lockutils [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 812.869627] env[69784]: ERROR nova.compute.manager [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 812.869627] env[69784]: Faults: ['InvalidArgument'] [ 812.869627] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Traceback (most recent call last): [ 812.869627] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 812.869627] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] yield resources [ 812.869627] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 812.869627] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] self.driver.spawn(context, instance, image_meta, [ 812.869918] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 812.869918] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 812.869918] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 812.869918] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] self._fetch_image_if_missing(context, vi) [ 812.869918] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 812.869918] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] image_cache(vi, tmp_image_ds_loc) [ 812.869918] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 812.869918] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] vm_util.copy_virtual_disk( [ 812.869918] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 812.869918] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] session._wait_for_task(vmdk_copy_task) [ 812.869918] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 812.869918] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] return self.wait_for_task(task_ref) [ 812.869918] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 812.870286] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] return evt.wait() [ 812.870286] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 812.870286] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] result = hub.switch() [ 812.870286] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 812.870286] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] return self.greenlet.switch() [ 812.870286] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 812.870286] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] self.f(*self.args, **self.kw) [ 812.870286] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 812.870286] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] raise exceptions.translate_fault(task_info.error) [ 812.870286] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 812.870286] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Faults: ['InvalidArgument'] [ 812.870286] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] [ 812.870558] env[69784]: INFO nova.compute.manager [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Terminating instance [ 812.870558] env[69784]: DEBUG oslo_concurrency.lockutils [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 812.870558] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 812.871331] env[69784]: DEBUG nova.compute.manager [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 
tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 812.871331] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 812.871447] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-de538233-8e9f-4764-af4d-1e400199c2f4 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.874416] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-744db4a2-dfef-4e64-9409-b9e6414d3c61 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.880619] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 812.880867] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-47ec271e-0a4d-4798-becc-2ec79acb0b23 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.883113] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 812.883297] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 812.884413] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb6810ab-2515-4c43-a88a-cd56b921d47c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.888889] env[69784]: DEBUG oslo_vmware.api [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Waiting for the task: (returnval){ [ 812.888889] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]524824cb-fc0a-fa21-c0cf-a64090bfb53c" [ 812.888889] env[69784]: _type = "Task" [ 812.888889] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.901177] env[69784]: DEBUG oslo_vmware.api [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]524824cb-fc0a-fa21-c0cf-a64090bfb53c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.960521] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 812.960711] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 812.960899] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Deleting the datastore file [datastore1] cec4f0d1-1b7d-4189-aadf-6f801f52d9bb {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 812.961205] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-62ae6e1d-5917-4dbe-a5cc-9793e5f08ae9 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.968300] env[69784]: DEBUG oslo_vmware.api [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Waiting for the task: (returnval){ [ 812.968300] env[69784]: value = "task-3467063" [ 812.968300] env[69784]: _type = "Task" [ 812.968300] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.976310] env[69784]: DEBUG oslo_vmware.api [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Task: {'id': task-3467063, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.400106] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 813.400106] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Creating directory with path [datastore1] vmware_temp/f1980e5f-19ef-4892-a837-b8f2aea5978c/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 813.400106] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-026ef968-2b61-4503-8f32-ec6923968882 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.411719] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Created directory with path [datastore1] vmware_temp/f1980e5f-19ef-4892-a837-b8f2aea5978c/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 813.411927] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Fetch image to [datastore1] vmware_temp/f1980e5f-19ef-4892-a837-b8f2aea5978c/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 813.412069] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/f1980e5f-19ef-4892-a837-b8f2aea5978c/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 813.412755] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a867785-c43d-4c29-ade7-d4e9e30f59e6 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.418904] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e674dee-5ad7-472a-8e6a-394474578d6e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.427540] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62fac979-a6db-402b-94c7-1fe3dee0d19b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.458555] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc4d7817-1686-43a7-8459-a6470a69e64c {{(pid=69784) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.464916] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a6b75787-33dc-48e0-bce7-6bda75bb7feb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.476411] env[69784]: DEBUG oslo_vmware.api [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Task: {'id': task-3467063, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.063338} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.476633] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 813.476805] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 813.476965] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 813.477147] env[69784]: INFO nova.compute.manager [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 813.479375] env[69784]: DEBUG nova.compute.claims [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 813.479375] env[69784]: DEBUG oslo_concurrency.lockutils [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 813.479559] env[69784]: DEBUG oslo_concurrency.lockutils [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 813.483838] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 813.537283] env[69784]: DEBUG oslo_vmware.rw_handles [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f1980e5f-19ef-4892-a837-b8f2aea5978c/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 813.596705] env[69784]: DEBUG oslo_vmware.rw_handles [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Completed reading data from the image iterator. {{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 813.596930] env[69784]: DEBUG oslo_vmware.rw_handles [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f1980e5f-19ef-4892-a837-b8f2aea5978c/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 813.981043] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19b3fe74-eb37-4235-9c4e-52a0a4a18c16 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.989518] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb9afcb5-1ac3-46cc-b712-4cba570c82c7 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.018029] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88585a66-2f69-4d39-b4d0-9c2bc7122050 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.025473] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30770307-ba88-4490-89d0-711b280ea5ac {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.040889] env[69784]: DEBUG nova.compute.provider_tree [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 814.049720] env[69784]: DEBUG nova.scheduler.client.report [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 814.066111] env[69784]: DEBUG oslo_concurrency.lockutils [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.586s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 814.066163] env[69784]: ERROR nova.compute.manager [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 814.066163] env[69784]: Faults: ['InvalidArgument'] [ 814.066163] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Traceback (most recent call last): [ 814.066163] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 
814.066163] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] self.driver.spawn(context, instance, image_meta, [ 814.066163] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 814.066163] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 814.066163] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 814.066163] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] self._fetch_image_if_missing(context, vi) [ 814.066163] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 814.066163] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] image_cache(vi, tmp_image_ds_loc) [ 814.066163] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 814.066433] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] vm_util.copy_virtual_disk( [ 814.066433] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 814.066433] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] session._wait_for_task(vmdk_copy_task) [ 814.066433] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 814.066433] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] return self.wait_for_task(task_ref) [ 814.066433] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 814.066433] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] return evt.wait() [ 814.066433] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 814.066433] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] result = hub.switch() [ 814.066433] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 814.066433] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] return self.greenlet.switch() [ 814.066433] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 814.066433] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] self.f(*self.args, **self.kw) [ 814.066707] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 814.066707] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] raise exceptions.translate_fault(task_info.error) [ 814.066707] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 814.066707] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Faults: ['InvalidArgument'] [ 814.066707] env[69784]: ERROR nova.compute.manager [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] [ 814.066878] env[69784]: DEBUG nova.compute.utils [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 814.068291] env[69784]: DEBUG nova.compute.manager [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Build of instance cec4f0d1-1b7d-4189-aadf-6f801f52d9bb was re-scheduled: A specified parameter was not correct: fileType [ 814.068291] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 814.068663] env[69784]: DEBUG nova.compute.manager [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 814.068832] env[69784]: DEBUG nova.compute.manager [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 814.068983] env[69784]: DEBUG nova.compute.manager [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 814.069187] env[69784]: DEBUG nova.network.neutron [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 814.452417] env[69784]: DEBUG nova.network.neutron [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.462399] env[69784]: INFO nova.compute.manager [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] Took 0.39 seconds to deallocate network for instance. [ 814.558656] env[69784]: INFO nova.scheduler.client.report [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Deleted allocations for instance cec4f0d1-1b7d-4189-aadf-6f801f52d9bb [ 814.579371] env[69784]: DEBUG oslo_concurrency.lockutils [None req-9585df40-0513-4cd4-bc61-be7d5a861a8e tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Lock "cec4f0d1-1b7d-4189-aadf-6f801f52d9bb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 154.467s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 814.580699] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "cec4f0d1-1b7d-4189-aadf-6f801f52d9bb" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 141.680s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 814.580887] env[69784]: INFO nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: cec4f0d1-1b7d-4189-aadf-6f801f52d9bb] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 814.581137] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "cec4f0d1-1b7d-4189-aadf-6f801f52d9bb" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 814.608228] env[69784]: DEBUG nova.compute.manager [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 814.663877] env[69784]: DEBUG oslo_concurrency.lockutils [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 814.664191] env[69784]: DEBUG oslo_concurrency.lockutils [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 814.665691] env[69784]: INFO nova.compute.claims [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 815.151079] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7727716-f2ab-4642-8015-fea94c39080c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.158856] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad6f8021-f200-4d95-a4fc-c86aaab3fe87 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.188179] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d88a5518-7410-4f86-8441-ae6a11b1a024 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.197758] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3953720d-4c0e-4461-a692-84626bdff246 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.211049] env[69784]: DEBUG nova.compute.provider_tree [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 815.219870] env[69784]: DEBUG nova.scheduler.client.report 
[None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 815.244154] env[69784]: DEBUG oslo_concurrency.lockutils [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.580s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 815.244855] env[69784]: DEBUG nova.compute.manager [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Start building networks asynchronously for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 815.284397] env[69784]: DEBUG nova.compute.utils [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 815.285418] env[69784]: DEBUG nova.compute.manager [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 815.285589] env[69784]: DEBUG nova.network.neutron [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 815.294018] env[69784]: DEBUG nova.compute.manager [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 815.360630] env[69784]: DEBUG nova.compute.manager [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Start spawning the instance on the hypervisor. 
{{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 815.369170] env[69784]: DEBUG nova.policy [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2fbec15855794f51b2ce0551e0e18cf5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '29b6e115fb054e45954e31332437ef3b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 815.390950] env[69784]: DEBUG nova.virt.hardware [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T20:00:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='56128026',id=21,is_public=True,memory_mb=128,name='tempest-flavor_with_ephemeral_0-319191182',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 815.391226] env[69784]: DEBUG nova.virt.hardware [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 815.391387] env[69784]: DEBUG nova.virt.hardware [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 815.391604] env[69784]: DEBUG nova.virt.hardware [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 815.391755] env[69784]: DEBUG nova.virt.hardware [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 815.391906] env[69784]: DEBUG nova.virt.hardware [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Chose sockets=0, cores=0, threads=0; 
limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 815.392156] env[69784]: DEBUG nova.virt.hardware [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 815.392318] env[69784]: DEBUG nova.virt.hardware [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 815.392497] env[69784]: DEBUG nova.virt.hardware [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 815.392662] env[69784]: DEBUG nova.virt.hardware [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 815.392832] env[69784]: DEBUG nova.virt.hardware [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 815.393689] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8253fc2e-3501-4eea-ba94-4c768826ae05 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.401886] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f47631b4-b8a4-481f-b62d-c195cd3108c3 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.825477] env[69784]: DEBUG nova.network.neutron [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Successfully created port: 15db3c60-7f05-49c4-bfe1-ba9604b9499d {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 816.145776] env[69784]: DEBUG oslo_concurrency.lockutils [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Acquiring lock "c23e7041-ca02-4047-84d5-84b62f36b37f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 816.146039] env[69784]: DEBUG oslo_concurrency.lockutils [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 
tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Lock "c23e7041-ca02-4047-84d5-84b62f36b37f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 816.736177] env[69784]: DEBUG nova.network.neutron [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Successfully updated port: 15db3c60-7f05-49c4-bfe1-ba9604b9499d {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 816.746595] env[69784]: DEBUG oslo_concurrency.lockutils [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Acquiring lock "refresh_cache-ecec531e-41d9-47e3-b447-bc658edaea69" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 816.746764] env[69784]: DEBUG oslo_concurrency.lockutils [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Acquired lock "refresh_cache-ecec531e-41d9-47e3-b447-bc658edaea69" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 816.746986] env[69784]: DEBUG nova.network.neutron [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 816.757163] env[69784]: DEBUG nova.compute.manager [req-c765037d-7242-43eb-ae4e-c9c673bbf776 req-b3796152-234b-4b96-8793-449e44947da3 service nova] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Received event network-vif-plugged-15db3c60-7f05-49c4-bfe1-ba9604b9499d {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 816.757406] env[69784]: DEBUG oslo_concurrency.lockutils [req-c765037d-7242-43eb-ae4e-c9c673bbf776 req-b3796152-234b-4b96-8793-449e44947da3 service nova] Acquiring lock "ecec531e-41d9-47e3-b447-bc658edaea69-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 816.757612] env[69784]: DEBUG oslo_concurrency.lockutils [req-c765037d-7242-43eb-ae4e-c9c673bbf776 req-b3796152-234b-4b96-8793-449e44947da3 service nova] Lock "ecec531e-41d9-47e3-b447-bc658edaea69-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 816.757780] env[69784]: DEBUG oslo_concurrency.lockutils [req-c765037d-7242-43eb-ae4e-c9c673bbf776 req-b3796152-234b-4b96-8793-449e44947da3 service nova] Lock "ecec531e-41d9-47e3-b447-bc658edaea69-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 816.757947] env[69784]: DEBUG nova.compute.manager [req-c765037d-7242-43eb-ae4e-c9c673bbf776 req-b3796152-234b-4b96-8793-449e44947da3 service nova] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] No waiting events found dispatching network-vif-plugged-15db3c60-7f05-49c4-bfe1-ba9604b9499d {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 816.758149] env[69784]: WARNING nova.compute.manager [req-c765037d-7242-43eb-ae4e-c9c673bbf776 req-b3796152-234b-4b96-8793-449e44947da3 service nova] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Received unexpected event network-vif-plugged-15db3c60-7f05-49c4-bfe1-ba9604b9499d for instance with vm_state building and task_state spawning. [ 816.794840] env[69784]: DEBUG nova.network.neutron [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Instance cache missing network info. {{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 817.028245] env[69784]: DEBUG nova.network.neutron [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Updating instance_info_cache with network_info: [{"id": "15db3c60-7f05-49c4-bfe1-ba9604b9499d", "address": "fa:16:3e:1a:3e:30", "network": {"id": "13d7badd-9153-425b-b185-a3a9636579ab", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-812632440-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "29b6e115fb054e45954e31332437ef3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24210a23-d8ac-4f4f-84ac-dc0636de9a72", "external-id": "nsx-vlan-transportzone-257", "segmentation_id": 257, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15db3c60-7f", "ovs_interfaceid": "15db3c60-7f05-49c4-bfe1-ba9604b9499d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 817.041209] env[69784]: DEBUG oslo_concurrency.lockutils [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Releasing lock "refresh_cache-ecec531e-41d9-47e3-b447-bc658edaea69" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 817.041509] env[69784]: DEBUG nova.compute.manager [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Instance network_info: |[{"id": "15db3c60-7f05-49c4-bfe1-ba9604b9499d", 
"address": "fa:16:3e:1a:3e:30", "network": {"id": "13d7badd-9153-425b-b185-a3a9636579ab", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-812632440-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "29b6e115fb054e45954e31332437ef3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24210a23-d8ac-4f4f-84ac-dc0636de9a72", "external-id": "nsx-vlan-transportzone-257", "segmentation_id": 257, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15db3c60-7f", "ovs_interfaceid": "15db3c60-7f05-49c4-bfe1-ba9604b9499d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 817.041907] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:3e:30', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24210a23-d8ac-4f4f-84ac-dc0636de9a72', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '15db3c60-7f05-49c4-bfe1-ba9604b9499d', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 817.049646] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Creating folder: Project (29b6e115fb054e45954e31332437ef3b). Parent ref: group-v692547. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 817.050197] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-185c63f2-4e7e-434e-84d5-059178a6e3c1 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.062902] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Created folder: Project (29b6e115fb054e45954e31332437ef3b) in parent group-v692547. [ 817.063117] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Creating folder: Instances. Parent ref: group-v692588. 
{{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 817.063337] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-793eaddc-8e50-4204-8054-351fef804f71 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.073954] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Created folder: Instances in parent group-v692588. [ 817.074208] env[69784]: DEBUG oslo.service.loopingcall [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 817.074392] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 817.074588] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fcabc192-f898-4462-8921-3ff4c7a69ff9 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.093888] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 817.093888] env[69784]: value = "task-3467066" [ 817.093888] env[69784]: _type = "Task" [ 817.093888] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.102172] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467066, 'name': CreateVM_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.603516] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467066, 'name': CreateVM_Task, 'duration_secs': 0.306577} completed successfully. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.603733] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 817.604428] env[69784]: DEBUG oslo_concurrency.lockutils [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 817.604623] env[69784]: DEBUG oslo_concurrency.lockutils [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 817.604973] env[69784]: DEBUG oslo_concurrency.lockutils [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 817.605268] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8ae4177-9717-45fa-a995-959d0d98103d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.609720] env[69784]: DEBUG oslo_vmware.api [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Waiting for the task: (returnval){ [ 817.609720] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]529d04f4-7cd6-1744-4956-b88af3bb781b" [ 817.609720] env[69784]: _type = "Task" [ 817.609720] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.617365] env[69784]: DEBUG oslo_vmware.api [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]529d04f4-7cd6-1744-4956-b88af3bb781b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.120039] env[69784]: DEBUG oslo_concurrency.lockutils [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 818.120333] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 818.120520] env[69784]: DEBUG oslo_concurrency.lockutils [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 818.821458] env[69784]: DEBUG nova.compute.manager [req-9bc4b98f-ed5c-4628-8219-b2ec88ab3e9a req-3a4c41f3-a405-437f-8183-b22bdbdff60b service nova] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Received event network-changed-15db3c60-7f05-49c4-bfe1-ba9604b9499d {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 818.821656] env[69784]: DEBUG nova.compute.manager [req-9bc4b98f-ed5c-4628-8219-b2ec88ab3e9a req-3a4c41f3-a405-437f-8183-b22bdbdff60b service nova] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Refreshing instance network info cache due to event network-changed-15db3c60-7f05-49c4-bfe1-ba9604b9499d. {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 818.821880] env[69784]: DEBUG oslo_concurrency.lockutils [req-9bc4b98f-ed5c-4628-8219-b2ec88ab3e9a req-3a4c41f3-a405-437f-8183-b22bdbdff60b service nova] Acquiring lock "refresh_cache-ecec531e-41d9-47e3-b447-bc658edaea69" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 818.821997] env[69784]: DEBUG oslo_concurrency.lockutils [req-9bc4b98f-ed5c-4628-8219-b2ec88ab3e9a req-3a4c41f3-a405-437f-8183-b22bdbdff60b service nova] Acquired lock "refresh_cache-ecec531e-41d9-47e3-b447-bc658edaea69" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 818.822177] env[69784]: DEBUG nova.network.neutron [req-9bc4b98f-ed5c-4628-8219-b2ec88ab3e9a req-3a4c41f3-a405-437f-8183-b22bdbdff60b service nova] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Refreshing network info cache for port 15db3c60-7f05-49c4-bfe1-ba9604b9499d {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 819.137095] env[69784]: DEBUG nova.network.neutron [req-9bc4b98f-ed5c-4628-8219-b2ec88ab3e9a req-3a4c41f3-a405-437f-8183-b22bdbdff60b service nova] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Updated VIF entry in instance network info cache for port 15db3c60-7f05-49c4-bfe1-ba9604b9499d. 
{{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 819.137504] env[69784]: DEBUG nova.network.neutron [req-9bc4b98f-ed5c-4628-8219-b2ec88ab3e9a req-3a4c41f3-a405-437f-8183-b22bdbdff60b service nova] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Updating instance_info_cache with network_info: [{"id": "15db3c60-7f05-49c4-bfe1-ba9604b9499d", "address": "fa:16:3e:1a:3e:30", "network": {"id": "13d7badd-9153-425b-b185-a3a9636579ab", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-812632440-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "29b6e115fb054e45954e31332437ef3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24210a23-d8ac-4f4f-84ac-dc0636de9a72", "external-id": "nsx-vlan-transportzone-257", "segmentation_id": 257, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15db3c60-7f", "ovs_interfaceid": "15db3c60-7f05-49c4-bfe1-ba9604b9499d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.147493] env[69784]: DEBUG oslo_concurrency.lockutils [req-9bc4b98f-ed5c-4628-8219-b2ec88ab3e9a req-3a4c41f3-a405-437f-8183-b22bdbdff60b service nova] Releasing lock "refresh_cache-ecec531e-41d9-47e3-b447-bc658edaea69" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 849.841154] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 850.841167] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 850.841167] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 850.841167] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69784) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 851.839612] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 851.840952] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 851.840952] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 851.851781] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 851.852080] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 851.852150] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 851.852304] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69784) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 851.853444] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ede984f-c336-402e-bcd1-e721b44fc3db {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.862482] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d0d65a6-4c16-4519-9792-d33e16539eb4 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.876533] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4719005-bac2-4385-a46a-f1d5aeb6f83c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.882976] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef256085-408c-4733-8b5c-1a989774b0ce {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.913268] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] 
Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180955MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=69784) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 851.913432] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 851.913629] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 851.988777] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c16f7920-23d4-4d77-b70f-118887cc9ff7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 851.999096] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 2a8374bd-e901-4b1e-b9ee-e599bd8efed5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 852.009399] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance b5ebb553-9d00-4791-b563-8ddaa325dc88 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 852.022232] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 050e4912-aa96-43d9-8d5e-6db9b4e35961 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 852.031661] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance f0fd2350-cdac-4782-80c6-97c022f26711 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 852.041707] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance cf2e6054-e94c-48ab-9142-34475d17d2f1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 852.055756] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance d185e137-119c-4611-9749-00fe4a6bb2c8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 852.067399] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 54f60014-0a24-45c5-ab1e-14ab2b3fd8b6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 852.067588] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 150d9f3b-b338-4810-ad32-7c8609131ce4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 852.067722] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ecec531e-41d9-47e3-b447-bc658edaea69 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 852.082425] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance dc623e72-8e80-4aaa-8a0c-363481141255 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 852.082608] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 15e3e0f5-1967-4f7d-b45f-954845c3dc75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 852.093113] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c68ee659-716d-47cc-a6a1-d4c18fa5664f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 852.103022] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 6e54dcb2-6760-4403-8711-75bda2e053a3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 852.112557] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 852.122457] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance f27a066f-307a-4213-b4d3-a861068f4867 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 852.131617] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c1f8b9b1-de4f-47b2-8465-81b7c7139b38 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 852.131796] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance a927e0ff-6c39-47fd-a082-88d41eb54015 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 852.141159] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 16edc743-b24c-4a20-9046-f5d519bd7e9a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 852.152035] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance d6a9f02a-3d21-4adc-b598-b0586d7e54de has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 852.161438] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ecb67581-1c86-4bff-a063-8433329914c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 852.171181] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 14daabdf-7839-4dfb-bbc9-f4ea90e8db8e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 852.181070] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c4255a94-f498-4498-a3a3-2867b0f12936 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 852.190649] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance dc7d9de6-30f9-4f58-9142-6d36e42a3b99 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 852.200469] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 88d2f936-18bb-4b40-8c72-fb6372c447c5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 852.209763] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 9b67f788-cf36-4bb3-bdc0-575d2a2178ca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 852.220694] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c17163a5-f93c-4899-943a-59f3862eee07 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 852.220835] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 0d0d1503-5522-4c0d-9096-2f25ed0fd7df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 852.220967] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 65601835-8d30-46b8-b928-b3912d058c6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 852.221107] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c43ca674-06b8-4b5d-a709-2df095b509f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 852.230549] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 1fd4b343-4e5e-42c3-aa86-33c9866c5f6e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 852.230700] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 28e32097-d536-442f-bcb4-f93b64cb64e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 852.240366] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 471a1543-ff95-4010-84f9-206730770b1f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 852.240598] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 9369b20b-7027-47de-8495-a503ddfb69bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 852.249413] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c23e7041-ca02-4047-84d5-84b62f36b37f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 852.249717] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 852.249869] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 852.623347] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c90d3b61-3314-4cfb-8dc8-2e0f32b3eaff {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.631361] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb3bd536-3ded-40ef-9564-d97664f61f2c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.663605] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ad6f36b-bfed-44a9-a0b2-4744506abcf8 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.675185] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00eeaa5d-d9de-4290-bad9-fa353f1fcde1 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.688796] env[69784]: DEBUG nova.compute.provider_tree [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 852.699143] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 852.712704] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69784) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 852.712898] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.799s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 853.708632] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 853.709131] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 853.709131] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Starting heal instance info cache {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 853.709131] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Rebuilding the list of instances to heal {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 853.732709] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 853.732709] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 853.732709] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 853.732709] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 853.732709] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 853.732890] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Skipping network cache update for instance because it is Building. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 853.732890] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 853.732969] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 853.733108] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 853.733217] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 853.733335] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Didn't find any instances for network info cache update. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 853.839877] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 853.862797] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 860.071824] env[69784]: DEBUG oslo_concurrency.lockutils [None req-746ede3e-4ae2-498b-9758-d751aded850d tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Acquiring lock "150d9f3b-b338-4810-ad32-7c8609131ce4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 860.611592] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e7c68c8d-e210-410f-a3e3-7ba94688990a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Acquiring lock "15e3e0f5-1967-4f7d-b45f-954845c3dc75" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 861.535923] env[69784]: DEBUG oslo_concurrency.lockutils [None req-288e1eb3-e846-4a67-a066-4db8d0e4e4ca tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquiring lock "28e32097-d536-442f-bcb4-f93b64cb64e4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 
862.261637] env[69784]: DEBUG oslo_concurrency.lockutils [None req-058c580b-f4ba-4bee-af39-270354fdb3c3 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Acquiring lock "a927e0ff-6c39-47fd-a082-88d41eb54015" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 862.364629] env[69784]: WARNING oslo_vmware.rw_handles [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 862.364629] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 862.364629] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 862.364629] env[69784]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 862.364629] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 862.364629] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 862.364629] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 862.364629] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 862.364629] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 862.364629] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 862.364629] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 862.364629] env[69784]: ERROR oslo_vmware.rw_handles [ 862.364994] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/f1980e5f-19ef-4892-a837-b8f2aea5978c/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 862.366746] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 862.366989] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Copying Virtual Disk [datastore1] vmware_temp/f1980e5f-19ef-4892-a837-b8f2aea5978c/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] vmware_temp/f1980e5f-19ef-4892-a837-b8f2aea5978c/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 862.367275] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with 
opID=oslo.vmware-2d4993b2-388b-4851-a4c3-14b5dcae3c7f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.374860] env[69784]: DEBUG oslo_vmware.api [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Waiting for the task: (returnval){ [ 862.374860] env[69784]: value = "task-3467067" [ 862.374860] env[69784]: _type = "Task" [ 862.374860] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.382724] env[69784]: DEBUG oslo_vmware.api [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Task: {'id': task-3467067, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.884893] env[69784]: DEBUG oslo_vmware.exceptions [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Fault InvalidArgument not matched. {{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 862.885221] env[69784]: DEBUG oslo_concurrency.lockutils [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 862.885847] env[69784]: ERROR nova.compute.manager [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 862.885847] env[69784]: Faults: ['InvalidArgument'] [ 862.885847] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Traceback (most recent call last): [ 862.885847] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 862.885847] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] yield resources [ 862.885847] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 862.885847] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] self.driver.spawn(context, instance, image_meta, [ 862.885847] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 862.885847] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 862.885847] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 862.885847] env[69784]: ERROR nova.compute.manager [instance: 
150d9f3b-b338-4810-ad32-7c8609131ce4] self._fetch_image_if_missing(context, vi) [ 862.885847] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 862.886260] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] image_cache(vi, tmp_image_ds_loc) [ 862.886260] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 862.886260] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] vm_util.copy_virtual_disk( [ 862.886260] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 862.886260] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] session._wait_for_task(vmdk_copy_task) [ 862.886260] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 862.886260] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] return self.wait_for_task(task_ref) [ 862.886260] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 862.886260] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] return evt.wait() [ 862.886260] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 862.886260] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] result = hub.switch() [ 862.886260] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 862.886260] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] return self.greenlet.switch() [ 862.886652] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 862.886652] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] self.f(*self.args, **self.kw) [ 862.886652] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 862.886652] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] raise exceptions.translate_fault(task_info.error) [ 862.886652] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 862.886652] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Faults: ['InvalidArgument'] [ 862.886652] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] [ 862.886652] env[69784]: INFO nova.compute.manager [None 
req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Terminating instance [ 862.887918] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 862.888171] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 862.888380] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cdb71f31-ab36-48a9-a147-4587e5654fe1 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.892757] env[69784]: DEBUG nova.compute.manager [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 862.892955] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 862.893698] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2c3dcb4-12a7-43de-8db0-d9725ee15195 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.900150] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 862.900392] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0a221c7a-e9ff-459b-ac5f-05b7faf47f58 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.902642] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 862.902832] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 862.903761] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c597dac2-6f58-46c6-b69d-663de5dfdd50 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.908426] env[69784]: DEBUG oslo_vmware.api [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Waiting for the task: (returnval){ [ 862.908426] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]5274fa49-3ccd-872a-c164-f4f25862a00c" [ 862.908426] env[69784]: _type = "Task" [ 862.908426] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.915150] env[69784]: DEBUG oslo_vmware.api [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]5274fa49-3ccd-872a-c164-f4f25862a00c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.970345] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 862.970697] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 862.970741] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Deleting the datastore file [datastore1] 150d9f3b-b338-4810-ad32-7c8609131ce4 {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 862.970977] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7e942572-76ff-4d3f-8f4c-e3d8194edbab {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.976906] env[69784]: DEBUG oslo_vmware.api [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Waiting for the task: (returnval){ [ 862.976906] env[69784]: value = "task-3467069" [ 862.976906] env[69784]: _type = "Task" [ 862.976906] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.984365] env[69784]: DEBUG oslo_vmware.api [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Task: {'id': task-3467069, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.419016] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 863.419016] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Creating directory with path [datastore1] vmware_temp/267bca62-78bc-42fe-aea5-fa6f28751935/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 863.419141] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-34db6ab3-f33a-44e6-a8c4-eecef6aadb3a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.430782] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Created directory with path [datastore1] vmware_temp/267bca62-78bc-42fe-aea5-fa6f28751935/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 863.430973] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Fetch image to [datastore1] vmware_temp/267bca62-78bc-42fe-aea5-fa6f28751935/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 863.431156] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/267bca62-78bc-42fe-aea5-fa6f28751935/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 863.431918] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-602c806a-80ea-44ff-9d44-2b8be13e6a68 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.439465] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29fa4ac8-1425-4023-9121-a5a8f60503a3 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.448858] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99e0dc8c-69f4-4b63-a46c-11d27d9d8de5 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.482225] env[69784]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6974c08b-989e-467c-8bc0-b344b8ab6abc {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.489590] env[69784]: DEBUG oslo_vmware.api [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Task: {'id': task-3467069, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081935} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.491021] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 863.491249] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 863.491383] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 863.491559] env[69784]: INFO nova.compute.manager [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Took 0.60 seconds to destroy the instance on the hypervisor. 
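Note on the spawn failure traced above for instance 150d9f3b-b338-4810-ad32-7c8609131ce4: the call chain in the traceback is _fetch_image_if_missing -> _cache_sparse_image -> vm_util.copy_virtual_disk, which submits CopyVirtualDisk_Task and then blocks in wait_for_task; the task finishes in the error state with fault 'InvalidArgument' on fileType, and _poll_task turns that into the VimFaultException that aborts the build. The sketch below is a minimal reconstruction of that poll-and-translate pattern from the traceback; the helper name, the use of invoke_api/get_object_property, and the 0.5 s interval are assumptions for illustration, not the exact oslo.vmware implementation.

```python
# Sketch of the task-wait pattern implied by the traceback above.
# Assumptions: session.invoke_api() and vim_util.get_object_property()
# behave as in oslo.vmware; the poll interval is illustrative.
import time

from oslo_vmware import exceptions
from oslo_vmware import vim_util


def wait_for_vcenter_task(session, task_ref, poll_interval=0.5):
    """Block until a vCenter task finishes, raising on error.

    Mirrors the behaviour seen above: CopyVirtualDisk_Task ends in the
    'error' state with fault 'InvalidArgument' (fileType), so
    translate_fault() raises VimFaultException, which the spawn path
    logs and re-raises.
    """
    while True:
        task_info = session.invoke_api(vim_util, 'get_object_property',
                                       session.vim, task_ref, 'info')
        if task_info.state == 'success':
            return task_info
        if task_info.state == 'error':
            raise exceptions.translate_fault(task_info.error)
        time.sleep(poll_interval)
```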
[ 863.493315] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8243c7d2-4889-4934-a8ec-8bf415e24ab2 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.496488] env[69784]: DEBUG nova.compute.claims [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 863.496660] env[69784]: DEBUG oslo_concurrency.lockutils [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 863.496869] env[69784]: DEBUG oslo_concurrency.lockutils [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 863.513771] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 863.574315] env[69784]: DEBUG oslo_vmware.rw_handles [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/267bca62-78bc-42fe-aea5-fa6f28751935/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 863.636227] env[69784]: DEBUG oslo_vmware.rw_handles [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Completed reading data from the image iterator. {{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 863.636572] env[69784]: DEBUG oslo_vmware.rw_handles [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/267bca62-78bc-42fe-aea5-fa6f28751935/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 864.006056] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c023e8a4-f7b4-40c2-bdd6-c29135f8ac51 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.013448] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ba8f38a-d663-4913-a50a-7e61e3a843d8 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.042556] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c920d682-0371-47e3-9d1e-51528902231a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.049350] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-405364df-9bf0-40e8-9394-e949adf14e79 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.063144] env[69784]: DEBUG nova.compute.provider_tree [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 864.073723] env[69784]: DEBUG nova.scheduler.client.report [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 864.088692] env[69784]: DEBUG oslo_concurrency.lockutils [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.592s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 864.089872] env[69784]: ERROR nova.compute.manager [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 864.089872] env[69784]: Faults: ['InvalidArgument'] [ 864.089872] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Traceback (most recent call last): [ 864.089872] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 864.089872] env[69784]: ERROR 
nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] self.driver.spawn(context, instance, image_meta, [ 864.089872] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 864.089872] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 864.089872] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 864.089872] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] self._fetch_image_if_missing(context, vi) [ 864.089872] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 864.089872] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] image_cache(vi, tmp_image_ds_loc) [ 864.089872] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 864.090293] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] vm_util.copy_virtual_disk( [ 864.090293] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 864.090293] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] session._wait_for_task(vmdk_copy_task) [ 864.090293] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 864.090293] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] return self.wait_for_task(task_ref) [ 864.090293] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 864.090293] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] return evt.wait() [ 864.090293] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 864.090293] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] result = hub.switch() [ 864.090293] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 864.090293] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] return self.greenlet.switch() [ 864.090293] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 864.090293] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] self.f(*self.args, **self.kw) [ 864.090609] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 864.090609] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] raise exceptions.translate_fault(task_info.error) [ 864.090609] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 864.090609] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Faults: ['InvalidArgument'] [ 864.090609] env[69784]: ERROR nova.compute.manager [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] [ 864.090609] env[69784]: DEBUG nova.compute.utils [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 864.091437] env[69784]: DEBUG nova.compute.manager [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Build of instance 150d9f3b-b338-4810-ad32-7c8609131ce4 was re-scheduled: A specified parameter was not correct: fileType [ 864.091437] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 864.091807] env[69784]: DEBUG nova.compute.manager [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 864.091978] env[69784]: DEBUG nova.compute.manager [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 864.092145] env[69784]: DEBUG nova.compute.manager [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 864.092302] env[69784]: DEBUG nova.network.neutron [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 864.441826] env[69784]: DEBUG nova.network.neutron [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.457955] env[69784]: INFO nova.compute.manager [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Took 0.37 seconds to deallocate network for instance. [ 864.578032] env[69784]: INFO nova.scheduler.client.report [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Deleted allocations for instance 150d9f3b-b338-4810-ad32-7c8609131ce4 [ 864.606809] env[69784]: DEBUG oslo_concurrency.lockutils [None req-970c852e-c45b-444d-9cd1-51f208346268 tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Lock "150d9f3b-b338-4810-ad32-7c8609131ce4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.861s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 864.608799] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "150d9f3b-b338-4810-ad32-7c8609131ce4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 191.708s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 864.608990] env[69784]: INFO nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] During sync_power_state the instance has a pending task (spawning). Skip. 
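Note on the lock messages above ("Acquiring lock ... / acquired ... waited Ns / released ... held Ns"): they come from oslo.concurrency's lockutils, which nova uses to serialize the build, power-state sync, and terminate paths on the instance UUID, with a separate "<uuid>-events" lock for event bookkeeping. A minimal sketch of that serialization, assuming direct decorator/context-manager use of lockutils (nova reaches it through its own synchronized wrapper; the function names and reuse of the UUID below are illustrative):

```python
# Minimal sketch of the per-instance lock choreography visible in the
# log lines above. Names are illustrative; nova wraps this pattern
# rather than calling lockutils directly like this.
from oslo_concurrency import lockutils

INSTANCE_UUID = '150d9f3b-b338-4810-ad32-7c8609131ce4'


@lockutils.synchronized(INSTANCE_UUID)
def do_terminate_instance():
    # Runs only once the build path releases the same named lock,
    # which is why the terminate request above waited ~4.5 s.
    print('terminating %s' % INSTANCE_UUID)


def clear_events_for_instance():
    # The "-events" suffix gives a separate, short-lived lock for
    # clearing queued external events, matching the "...-events" lines.
    with lockutils.lock(INSTANCE_UUID + '-events'):
        print('clearing queued external events')


if __name__ == '__main__':
    do_terminate_instance()
    clear_events_for_instance()
```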
[ 864.609179] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "150d9f3b-b338-4810-ad32-7c8609131ce4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 864.609680] env[69784]: DEBUG oslo_concurrency.lockutils [None req-746ede3e-4ae2-498b-9758-d751aded850d tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Lock "150d9f3b-b338-4810-ad32-7c8609131ce4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 4.538s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 864.609908] env[69784]: DEBUG oslo_concurrency.lockutils [None req-746ede3e-4ae2-498b-9758-d751aded850d tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Acquiring lock "150d9f3b-b338-4810-ad32-7c8609131ce4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 864.610134] env[69784]: DEBUG oslo_concurrency.lockutils [None req-746ede3e-4ae2-498b-9758-d751aded850d tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Lock "150d9f3b-b338-4810-ad32-7c8609131ce4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 864.610253] env[69784]: DEBUG oslo_concurrency.lockutils [None req-746ede3e-4ae2-498b-9758-d751aded850d tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Lock "150d9f3b-b338-4810-ad32-7c8609131ce4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 864.615879] env[69784]: INFO nova.compute.manager [None req-746ede3e-4ae2-498b-9758-d751aded850d tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Terminating instance [ 864.617589] env[69784]: DEBUG nova.compute.manager [None req-746ede3e-4ae2-498b-9758-d751aded850d tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Start destroying the instance on the hypervisor. 
{{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 864.617767] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-746ede3e-4ae2-498b-9758-d751aded850d tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 864.618037] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-57f75dfe-085d-45df-b85f-65f052788cab {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.621009] env[69784]: DEBUG nova.compute.manager [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 864.630353] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3ba3a96-d2a7-45e9-bc32-f26d96f40350 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.659651] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-746ede3e-4ae2-498b-9758-d751aded850d tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 150d9f3b-b338-4810-ad32-7c8609131ce4 could not be found. [ 864.659876] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-746ede3e-4ae2-498b-9758-d751aded850d tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 864.660068] env[69784]: INFO nova.compute.manager [None req-746ede3e-4ae2-498b-9758-d751aded850d tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Took 0.04 seconds to destroy the instance on the hypervisor. [ 864.660310] env[69784]: DEBUG oslo.service.loopingcall [None req-746ede3e-4ae2-498b-9758-d751aded850d tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 864.662786] env[69784]: DEBUG nova.compute.manager [-] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 864.662901] env[69784]: DEBUG nova.network.neutron [-] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 864.679114] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 864.679424] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 864.681116] env[69784]: INFO nova.compute.claims [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 864.690754] env[69784]: DEBUG nova.network.neutron [-] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.702522] env[69784]: INFO nova.compute.manager [-] [instance: 150d9f3b-b338-4810-ad32-7c8609131ce4] Took 0.04 seconds to deallocate network for instance. 
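Note on the "Waiting for function ..._deallocate_network_with_retries to return" line above: the teardown's Neutron deallocation is driven through oslo.service's looping-call machinery so that transient client errors are retried instead of failing the instance delete. The sketch below illustrates that retry pattern with loopingcall.RetryDecorator; the retry count, sleep times, exception class, and the stubbed deallocate body are assumptions for illustration, not nova's actual parameters.

```python
# Sketch of a retried network teardown in the style implied by the
# loopingcall message above. All tunables and the stub body are
# illustrative assumptions.
from oslo_service import loopingcall


class TransientNeutronError(Exception):
    """Stand-in for a retryable Neutron client failure."""


@loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=1,
                            max_sleep_time=10,
                            exceptions=(TransientNeutronError,))
def deallocate_network_with_retries():
    # In nova this would call the driver's network deallocation; a
    # TransientNeutronError here makes RetryDecorator sleep and invoke
    # the function again rather than aborting the teardown.
    print('deallocating ports for the instance')


if __name__ == '__main__':
    deallocate_network_with_retries()
```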
[ 864.826556] env[69784]: DEBUG oslo_concurrency.lockutils [None req-746ede3e-4ae2-498b-9758-d751aded850d tempest-ServerDiagnosticsTest-1322390553 tempest-ServerDiagnosticsTest-1322390553-project-member] Lock "150d9f3b-b338-4810-ad32-7c8609131ce4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.216s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 865.207157] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71fafeec-32ca-488c-9999-d3abc2e023ca {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.216199] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-265b1144-a8f2-4887-b979-16ad9e4fa5c6 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.250282] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67818128-5b2f-4f49-a5f8-3d2fa6cb9067 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.258292] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35e0f1fa-385d-4e51-b2af-0480194caba9 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.272836] env[69784]: DEBUG nova.compute.provider_tree [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 865.284394] env[69784]: DEBUG nova.scheduler.client.report [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 865.297516] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.618s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 865.298150] env[69784]: DEBUG nova.compute.manager [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Start building networks asynchronously for instance. 
{{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 865.335759] env[69784]: DEBUG nova.compute.utils [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 865.337060] env[69784]: DEBUG nova.compute.manager [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 865.337234] env[69784]: DEBUG nova.network.neutron [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 865.346669] env[69784]: DEBUG nova.compute.manager [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 865.406574] env[69784]: DEBUG nova.policy [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fb9b943ba0034e5ab471df825d92c889', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '48568998d67246f388c977ebbc10f62f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 865.409865] env[69784]: DEBUG nova.compute.manager [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Start spawning the instance on the hypervisor. 
{{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 865.436441] env[69784]: DEBUG nova.virt.hardware [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 865.436692] env[69784]: DEBUG nova.virt.hardware [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 865.436848] env[69784]: DEBUG nova.virt.hardware [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 865.437038] env[69784]: DEBUG nova.virt.hardware [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 865.437190] env[69784]: DEBUG nova.virt.hardware [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 865.437335] env[69784]: DEBUG nova.virt.hardware [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 865.437640] env[69784]: DEBUG nova.virt.hardware [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 865.437853] env[69784]: DEBUG nova.virt.hardware [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 865.438047] env[69784]: DEBUG nova.virt.hardware [None 
req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 865.438219] env[69784]: DEBUG nova.virt.hardware [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 865.438393] env[69784]: DEBUG nova.virt.hardware [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 865.439251] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07fcba25-da0c-449c-bf60-027099ffc8b9 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.447535] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cffb19d5-3c7a-4d94-b78d-1cbe682b2294 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.784371] env[69784]: DEBUG nova.network.neutron [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Successfully created port: 6caab561-834a-4ab0-b745-08ca86f1bc6d {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 866.530753] env[69784]: DEBUG nova.compute.manager [req-ad519285-fdb3-460c-9430-1fed633f2f3f req-452ccb6b-f138-4b1e-a3f6-91ff1f5d1619 service nova] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Received event network-vif-plugged-6caab561-834a-4ab0-b745-08ca86f1bc6d {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 866.531064] env[69784]: DEBUG oslo_concurrency.lockutils [req-ad519285-fdb3-460c-9430-1fed633f2f3f req-452ccb6b-f138-4b1e-a3f6-91ff1f5d1619 service nova] Acquiring lock "c68ee659-716d-47cc-a6a1-d4c18fa5664f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 866.531267] env[69784]: DEBUG oslo_concurrency.lockutils [req-ad519285-fdb3-460c-9430-1fed633f2f3f req-452ccb6b-f138-4b1e-a3f6-91ff1f5d1619 service nova] Lock "c68ee659-716d-47cc-a6a1-d4c18fa5664f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 866.531719] env[69784]: DEBUG oslo_concurrency.lockutils [req-ad519285-fdb3-460c-9430-1fed633f2f3f req-452ccb6b-f138-4b1e-a3f6-91ff1f5d1619 service nova] Lock "c68ee659-716d-47cc-a6a1-d4c18fa5664f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 866.531719] env[69784]: DEBUG nova.compute.manager 
[req-ad519285-fdb3-460c-9430-1fed633f2f3f req-452ccb6b-f138-4b1e-a3f6-91ff1f5d1619 service nova] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] No waiting events found dispatching network-vif-plugged-6caab561-834a-4ab0-b745-08ca86f1bc6d {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 866.531719] env[69784]: WARNING nova.compute.manager [req-ad519285-fdb3-460c-9430-1fed633f2f3f req-452ccb6b-f138-4b1e-a3f6-91ff1f5d1619 service nova] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Received unexpected event network-vif-plugged-6caab561-834a-4ab0-b745-08ca86f1bc6d for instance with vm_state building and task_state spawning. [ 866.554237] env[69784]: DEBUG nova.network.neutron [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Successfully updated port: 6caab561-834a-4ab0-b745-08ca86f1bc6d {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 866.565909] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Acquiring lock "refresh_cache-c68ee659-716d-47cc-a6a1-d4c18fa5664f" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 866.566064] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Acquired lock "refresh_cache-c68ee659-716d-47cc-a6a1-d4c18fa5664f" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 866.566229] env[69784]: DEBUG nova.network.neutron [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 866.607642] env[69784]: DEBUG nova.network.neutron [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Instance cache missing network info. 
{{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 866.833524] env[69784]: DEBUG nova.network.neutron [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Updating instance_info_cache with network_info: [{"id": "6caab561-834a-4ab0-b745-08ca86f1bc6d", "address": "fa:16:3e:f8:04:51", "network": {"id": "fbbf9398-25fe-403b-9230-4bd9b644410d", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1188139381-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48568998d67246f388c977ebbc10f62f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2", "external-id": "nsx-vlan-transportzone-546", "segmentation_id": 546, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6caab561-83", "ovs_interfaceid": "6caab561-834a-4ab0-b745-08ca86f1bc6d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.846874] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Releasing lock "refresh_cache-c68ee659-716d-47cc-a6a1-d4c18fa5664f" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 866.847204] env[69784]: DEBUG nova.compute.manager [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Instance network_info: |[{"id": "6caab561-834a-4ab0-b745-08ca86f1bc6d", "address": "fa:16:3e:f8:04:51", "network": {"id": "fbbf9398-25fe-403b-9230-4bd9b644410d", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1188139381-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48568998d67246f388c977ebbc10f62f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2", "external-id": "nsx-vlan-transportzone-546", "segmentation_id": 546, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6caab561-83", "ovs_interfaceid": "6caab561-834a-4ab0-b745-08ca86f1bc6d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1971}} [ 866.847615] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:04:51', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6caab561-834a-4ab0-b745-08ca86f1bc6d', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 866.855723] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Creating folder: Project (48568998d67246f388c977ebbc10f62f). Parent ref: group-v692547. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 866.856387] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fe0c059a-2c80-4cb3-ba76-95af0cf707d8 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.868064] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Created folder: Project (48568998d67246f388c977ebbc10f62f) in parent group-v692547. [ 866.868064] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Creating folder: Instances. Parent ref: group-v692591. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 866.868064] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4e222a2c-8795-48d3-ba75-6af35970344c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.876825] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Created folder: Instances in parent group-v692591. [ 866.877110] env[69784]: DEBUG oslo.service.loopingcall [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 866.877298] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 866.877657] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8de265dc-e34d-4764-acf8-0f590bd1158c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.898599] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 866.898599] env[69784]: value = "task-3467072" [ 866.898599] env[69784]: _type = "Task" [ 866.898599] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.906027] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467072, 'name': CreateVM_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.112856] env[69784]: DEBUG oslo_concurrency.lockutils [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Acquiring lock "7a640743-734e-4dc0-a965-0a71dddfb918" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 867.113072] env[69784]: DEBUG oslo_concurrency.lockutils [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Lock "7a640743-734e-4dc0-a965-0a71dddfb918" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 867.408672] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467072, 'name': CreateVM_Task} progress is 99%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.909493] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467072, 'name': CreateVM_Task} progress is 99%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.410808] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467072, 'name': CreateVM_Task, 'duration_secs': 1.307576} completed successfully. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.411051] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 868.411794] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 868.412165] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 868.412300] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 868.412562] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6220bd14-6421-49de-8205-b7154bc13c19 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.417200] env[69784]: DEBUG oslo_vmware.api [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Waiting for the task: (returnval){ [ 868.417200] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52c2120f-beb1-8d78-034f-94b476314557" [ 868.417200] env[69784]: _type = "Task" [ 868.417200] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.426052] env[69784]: DEBUG oslo_vmware.api [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52c2120f-beb1-8d78-034f-94b476314557, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.594636] env[69784]: DEBUG nova.compute.manager [req-a7003c7a-5a8b-4851-a22e-bd994fbf4cfe req-aa058133-a4ae-4b03-8b72-eaa069402b28 service nova] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Received event network-changed-6caab561-834a-4ab0-b745-08ca86f1bc6d {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 868.594835] env[69784]: DEBUG nova.compute.manager [req-a7003c7a-5a8b-4851-a22e-bd994fbf4cfe req-aa058133-a4ae-4b03-8b72-eaa069402b28 service nova] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Refreshing instance network info cache due to event network-changed-6caab561-834a-4ab0-b745-08ca86f1bc6d. 
{{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 868.595066] env[69784]: DEBUG oslo_concurrency.lockutils [req-a7003c7a-5a8b-4851-a22e-bd994fbf4cfe req-aa058133-a4ae-4b03-8b72-eaa069402b28 service nova] Acquiring lock "refresh_cache-c68ee659-716d-47cc-a6a1-d4c18fa5664f" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 868.595211] env[69784]: DEBUG oslo_concurrency.lockutils [req-a7003c7a-5a8b-4851-a22e-bd994fbf4cfe req-aa058133-a4ae-4b03-8b72-eaa069402b28 service nova] Acquired lock "refresh_cache-c68ee659-716d-47cc-a6a1-d4c18fa5664f" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 868.595367] env[69784]: DEBUG nova.network.neutron [req-a7003c7a-5a8b-4851-a22e-bd994fbf4cfe req-aa058133-a4ae-4b03-8b72-eaa069402b28 service nova] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Refreshing network info cache for port 6caab561-834a-4ab0-b745-08ca86f1bc6d {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 868.869499] env[69784]: DEBUG nova.network.neutron [req-a7003c7a-5a8b-4851-a22e-bd994fbf4cfe req-aa058133-a4ae-4b03-8b72-eaa069402b28 service nova] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Updated VIF entry in instance network info cache for port 6caab561-834a-4ab0-b745-08ca86f1bc6d. {{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 868.869904] env[69784]: DEBUG nova.network.neutron [req-a7003c7a-5a8b-4851-a22e-bd994fbf4cfe req-aa058133-a4ae-4b03-8b72-eaa069402b28 service nova] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Updating instance_info_cache with network_info: [{"id": "6caab561-834a-4ab0-b745-08ca86f1bc6d", "address": "fa:16:3e:f8:04:51", "network": {"id": "fbbf9398-25fe-403b-9230-4bd9b644410d", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1188139381-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48568998d67246f388c977ebbc10f62f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2", "external-id": "nsx-vlan-transportzone-546", "segmentation_id": 546, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6caab561-83", "ovs_interfaceid": "6caab561-834a-4ab0-b745-08ca86f1bc6d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.880293] env[69784]: DEBUG oslo_concurrency.lockutils [req-a7003c7a-5a8b-4851-a22e-bd994fbf4cfe req-aa058133-a4ae-4b03-8b72-eaa069402b28 service nova] Releasing lock "refresh_cache-c68ee659-716d-47cc-a6a1-d4c18fa5664f" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 868.928561] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Releasing lock 
"[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 868.928900] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 868.929033] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 872.336024] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f545ad18-b9b8-43a3-9d18-21b1de111328 tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Acquiring lock "c16f7920-23d4-4d77-b70f-118887cc9ff7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 885.395146] env[69784]: DEBUG oslo_concurrency.lockutils [None req-23eea3cd-ea85-4c3b-97e3-1393582155b4 tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquiring lock "0d0d1503-5522-4c0d-9096-2f25ed0fd7df" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 885.507190] env[69784]: DEBUG oslo_concurrency.lockutils [None req-3d50f154-75c0-4800-8063-9edc8da9980a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquiring lock "65601835-8d30-46b8-b928-b3912d058c6e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 885.635261] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d8972bc0-a25a-4808-bec1-a24bc17ac0ce tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquiring lock "c43ca674-06b8-4b5d-a709-2df095b509f3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 886.822939] env[69784]: DEBUG oslo_concurrency.lockutils [None req-723d9a85-5d3f-49a9-9944-843e14ab3a1b tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Acquiring lock "9369b20b-7027-47de-8495-a503ddfb69bd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 889.685828] env[69784]: DEBUG oslo_concurrency.lockutils [None req-56c473f7-bc51-4640-9a5d-d821b99658ee tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Acquiring lock "c68ee659-716d-47cc-a6a1-d4c18fa5664f" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 890.255603] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f77bb094-be5e-4b32-b937-02427fa90214 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Acquiring lock "ecec531e-41d9-47e3-b447-bc658edaea69" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 891.788455] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Acquiring lock "288af650-a19b-4ce5-baea-013dcaa6e908" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 891.788744] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Lock "288af650-a19b-4ce5-baea-013dcaa6e908" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 901.421218] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f6225231-2c3a-4e91-882d-4fc844405a50 tempest-ImagesTestJSON-82243885 tempest-ImagesTestJSON-82243885-project-member] Acquiring lock "d48c4130-2875-4704-bbb5-75c17fd497c8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 901.422142] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f6225231-2c3a-4e91-882d-4fc844405a50 tempest-ImagesTestJSON-82243885 tempest-ImagesTestJSON-82243885-project-member] Lock "d48c4130-2875-4704-bbb5-75c17fd497c8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 904.236019] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c36439ae-ff2d-45e9-9293-4ecefca9d159 tempest-ServerActionsTestOtherA-926455394 tempest-ServerActionsTestOtherA-926455394-project-member] Acquiring lock "f763fb3a-9db8-457a-a713-9aa2abf9e440" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 904.236918] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c36439ae-ff2d-45e9-9293-4ecefca9d159 tempest-ServerActionsTestOtherA-926455394 tempest-ServerActionsTestOtherA-926455394-project-member] Lock "f763fb3a-9db8-457a-a713-9aa2abf9e440" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 905.707415] env[69784]: DEBUG oslo_concurrency.lockutils [None 
req-017e0973-a8ff-4a8f-b16a-801558fcc07c tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Acquiring lock "6d0fb95f-194e-49ca-8992-e2cec634a5bf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 905.707685] env[69784]: DEBUG oslo_concurrency.lockutils [None req-017e0973-a8ff-4a8f-b16a-801558fcc07c tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Lock "6d0fb95f-194e-49ca-8992-e2cec634a5bf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 908.840789] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 908.841125] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Cleaning up deleted instances {{(pid=69784) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11198}} [ 908.857174] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] There are 0 instances to clean {{(pid=69784) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11207}} [ 908.857418] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 908.857575] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Cleaning up deleted instances with incomplete migration {{(pid=69784) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11236}} [ 908.868946] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 910.607902] env[69784]: WARNING oslo_vmware.rw_handles [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 910.607902] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 910.607902] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 910.607902] env[69784]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 910.607902] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 910.607902] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 910.607902] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 910.607902] env[69784]: ERROR oslo_vmware.rw_handles 
version, status, reason = self._read_status() [ 910.607902] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 910.607902] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 910.607902] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 910.607902] env[69784]: ERROR oslo_vmware.rw_handles [ 910.608598] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/267bca62-78bc-42fe-aea5-fa6f28751935/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 910.609998] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 910.610275] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Copying Virtual Disk [datastore1] vmware_temp/267bca62-78bc-42fe-aea5-fa6f28751935/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] vmware_temp/267bca62-78bc-42fe-aea5-fa6f28751935/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 910.610571] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b6b94e97-5cb9-45dd-a8cd-bf75c2ea56ff {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.619027] env[69784]: DEBUG oslo_vmware.api [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Waiting for the task: (returnval){ [ 910.619027] env[69784]: value = "task-3467073" [ 910.619027] env[69784]: _type = "Task" [ 910.619027] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.626702] env[69784]: DEBUG oslo_vmware.api [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Task: {'id': task-3467073, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.876630] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 910.876874] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 910.877053] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 910.877203] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69784) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 911.134821] env[69784]: DEBUG oslo_vmware.exceptions [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Fault InvalidArgument not matched. {{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 911.137622] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 911.138253] env[69784]: ERROR nova.compute.manager [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 911.138253] env[69784]: Faults: ['InvalidArgument'] [ 911.138253] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Traceback (most recent call last): [ 911.138253] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 911.138253] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] yield resources [ 911.138253] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 911.138253] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] self.driver.spawn(context, instance, image_meta, [ 911.138253] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 911.138253] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] 
self._vmops.spawn(context, instance, image_meta, injected_files, [ 911.138253] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 911.138253] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] self._fetch_image_if_missing(context, vi) [ 911.138253] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 911.138602] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] image_cache(vi, tmp_image_ds_loc) [ 911.138602] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 911.138602] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] vm_util.copy_virtual_disk( [ 911.138602] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 911.138602] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] session._wait_for_task(vmdk_copy_task) [ 911.138602] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 911.138602] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] return self.wait_for_task(task_ref) [ 911.138602] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 911.138602] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] return evt.wait() [ 911.138602] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 911.138602] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] result = hub.switch() [ 911.138602] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 911.138602] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] return self.greenlet.switch() [ 911.138919] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 911.138919] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] self.f(*self.args, **self.kw) [ 911.138919] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 911.138919] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] raise exceptions.translate_fault(task_info.error) [ 911.138919] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 
911.138919] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Faults: ['InvalidArgument'] [ 911.138919] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] [ 911.138919] env[69784]: INFO nova.compute.manager [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Terminating instance [ 911.140102] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 911.140329] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 911.140939] env[69784]: DEBUG nova.compute.manager [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 911.141355] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 911.141508] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a3264191-4b96-4159-905a-5a95c40192cf {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.143786] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e778817a-f3ca-468c-8f5b-90217fe84b3e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.152287] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 911.152593] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-37bad2a5-64d6-439d-8dc6-ed8dfe94f223 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.154894] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 911.155074] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 911.156084] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79a7aaf9-1edf-451d-9f24-6998bb823454 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.161071] env[69784]: DEBUG oslo_vmware.api [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Waiting for the task: (returnval){ [ 911.161071] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]5299c191-0115-4051-5d31-c21e46453105" [ 911.161071] env[69784]: _type = "Task" [ 911.161071] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.175260] env[69784]: DEBUG oslo_vmware.api [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]5299c191-0115-4051-5d31-c21e46453105, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.231713] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 911.232018] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 911.232458] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Deleting the datastore file [datastore1] 15e3e0f5-1967-4f7d-b45f-954845c3dc75 {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 911.232547] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4dae0aea-89bd-4143-b5c9-3178ef90a5e3 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.240897] env[69784]: DEBUG oslo_vmware.api [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Waiting for the task: (returnval){ [ 911.240897] env[69784]: value = "task-3467075" [ 911.240897] env[69784]: _type = "Task" [ 911.240897] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.248831] env[69784]: DEBUG oslo_vmware.api [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Task: {'id': task-3467075, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.678178] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 911.678178] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Creating directory with path [datastore1] vmware_temp/feb7b70c-5e9e-4d09-9370-ca793c70c8f5/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 911.678178] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fd270016-6217-4582-a190-f903c07954f4 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.688718] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Created directory with path [datastore1] vmware_temp/feb7b70c-5e9e-4d09-9370-ca793c70c8f5/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 911.688718] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Fetch image to [datastore1] vmware_temp/feb7b70c-5e9e-4d09-9370-ca793c70c8f5/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 911.688718] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/feb7b70c-5e9e-4d09-9370-ca793c70c8f5/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 911.689786] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c84a25a0-9314-41d2-92e3-146e8df9ddd7 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.701882] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a5d0b35-668e-4351-bab1-9aba6d2686ea {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.713188] env[69784]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b084a6e-127a-4e0f-8a17-187dde0d529a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.750130] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba568a58-0204-4694-ad05-8304fc425a3c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.758783] env[69784]: DEBUG oslo_vmware.api [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Task: {'id': task-3467075, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073519} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.761588] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 911.761860] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 911.761940] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 911.762165] env[69784]: INFO nova.compute.manager [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Took 0.62 seconds to destroy the instance on the hypervisor. 
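[editor's note] The entries above and below keep repeating one pattern: a vSphere task (CreateVM_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task) is invoked via oslo.vmware, then polled roughly every half second ("progress is 0%" ... "completed successfully" or a raised VimFaultException). The following is only a minimal, hypothetical sketch of such a polling loop to make the pattern easier to follow; the function name, the poll_fn contract, and the TaskFailed exception are assumptions for illustration, not the oslo.vmware implementation referenced in the log.

    import time

    class TaskFailed(Exception):
        """Raised when the polled task reports an error (hypothetical stand-in
        for the fault translation seen in the log)."""

    def wait_for_task(poll_fn, interval=0.5, log=print):
        """Poll a long-running task until it finishes.

        poll_fn is a hypothetical callable returning a dict such as
        {'state': 'running', 'progress': 42}, {'state': 'success'}, or
        {'state': 'error', 'error': '...'} -- a stand-in for the task info
        objects the log shows being polled every ~0.5s.
        """
        while True:
            info = poll_fn()
            if info['state'] == 'success':
                log("task completed successfully")
                return info
            if info['state'] == 'error':
                # In the log, this is where a fault such as
                # "A specified parameter was not correct: fileType" surfaces.
                raise TaskFailed(info.get('error', 'unknown error'))
            log("progress is %s%%" % info.get('progress', 0))
            time.sleep(interval)
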
[ 911.764040] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-93d7660a-5e03-4e6d-ab12-b4000d1738ff {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.766038] env[69784]: DEBUG nova.compute.claims [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 911.766290] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 911.766415] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 911.792233] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 911.841488] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 911.861138] env[69784]: DEBUG oslo_vmware.rw_handles [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/feb7b70c-5e9e-4d09-9370-ca793c70c8f5/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 911.925549] env[69784]: DEBUG oslo_vmware.rw_handles [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Completed reading data from the image iterator. 
{{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 911.925751] env[69784]: DEBUG oslo_vmware.rw_handles [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/feb7b70c-5e9e-4d09-9370-ca793c70c8f5/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 912.238542] env[69784]: DEBUG oslo_concurrency.lockutils [None req-441143a3-1bc9-4dff-9df1-fcbb86f4a568 tempest-ServersTestManualDisk-160721202 tempest-ServersTestManualDisk-160721202-project-member] Acquiring lock "47224bae-e259-4517-a379-0561e3812057" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 912.238847] env[69784]: DEBUG oslo_concurrency.lockutils [None req-441143a3-1bc9-4dff-9df1-fcbb86f4a568 tempest-ServersTestManualDisk-160721202 tempest-ServersTestManualDisk-160721202-project-member] Lock "47224bae-e259-4517-a379-0561e3812057" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 912.310028] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7933fe26-d269-485a-9318-43387b918e06 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.318557] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5579e572-4936-4b43-8352-e9d063f05510 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.349194] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c148cb59-73f0-41a7-9323-ea8606ac39aa {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.356410] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e2ebf34-ac47-44c0-9375-a6079d7d03fe {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.369518] env[69784]: DEBUG nova.compute.provider_tree [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 912.381861] env[69784]: DEBUG nova.scheduler.client.report [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 912.399163] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.632s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 912.399800] env[69784]: ERROR nova.compute.manager [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 912.399800] env[69784]: Faults: ['InvalidArgument'] [ 912.399800] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Traceback (most recent call last): [ 912.399800] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 912.399800] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] self.driver.spawn(context, instance, image_meta, [ 912.399800] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 912.399800] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] self._vmops.spawn(context, instance, image_meta, injected_files, [ 912.399800] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 912.399800] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] self._fetch_image_if_missing(context, vi) [ 912.399800] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 912.399800] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] image_cache(vi, tmp_image_ds_loc) [ 912.399800] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 912.400237] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] vm_util.copy_virtual_disk( [ 912.400237] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 912.400237] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] session._wait_for_task(vmdk_copy_task) [ 912.400237] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 912.400237] env[69784]: ERROR nova.compute.manager [instance: 
15e3e0f5-1967-4f7d-b45f-954845c3dc75] return self.wait_for_task(task_ref) [ 912.400237] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 912.400237] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] return evt.wait() [ 912.400237] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 912.400237] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] result = hub.switch() [ 912.400237] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 912.400237] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] return self.greenlet.switch() [ 912.400237] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 912.400237] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] self.f(*self.args, **self.kw) [ 912.400690] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 912.400690] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] raise exceptions.translate_fault(task_info.error) [ 912.400690] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 912.400690] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Faults: ['InvalidArgument'] [ 912.400690] env[69784]: ERROR nova.compute.manager [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] [ 912.400690] env[69784]: DEBUG nova.compute.utils [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 912.407024] env[69784]: DEBUG nova.compute.manager [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Build of instance 15e3e0f5-1967-4f7d-b45f-954845c3dc75 was re-scheduled: A specified parameter was not correct: fileType [ 912.407024] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 912.407024] env[69784]: DEBUG nova.compute.manager [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 912.407024] env[69784]: DEBUG nova.compute.manager [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a 
tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 912.407024] env[69784]: DEBUG nova.compute.manager [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 912.407582] env[69784]: DEBUG nova.network.neutron [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 912.834095] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9608b91-f8da-424c-a473-33914a56d794 tempest-ServersTestJSON-328276705 tempest-ServersTestJSON-328276705-project-member] Acquiring lock "82981529-56c3-43c0-8d33-c2f2b0875bfc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 912.834095] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9608b91-f8da-424c-a473-33914a56d794 tempest-ServersTestJSON-328276705 tempest-ServersTestJSON-328276705-project-member] Lock "82981529-56c3-43c0-8d33-c2f2b0875bfc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 912.836414] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 912.839301] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 912.841762] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 912.850596] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 912.851015] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69784) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 912.851199] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 912.851361] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69784) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 912.852443] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43d5b2bc-b6ef-47ba-beed-d77c264660c0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.861936] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8130165-1bb1-436c-9d64-983ba23f311f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.880943] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6861c71d-9d22-4775-b5cc-e7cf36557a6a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.891820] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a5ef6c0-2dc9-43f9-aebc-bfb8b3429d9f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.924967] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180955MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=69784) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 912.925966] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 912.925966] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 912.937397] env[69784]: DEBUG nova.network.neutron [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.980050] env[69784]: INFO nova.compute.manager [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 
15e3e0f5-1967-4f7d-b45f-954845c3dc75] Took 0.58 seconds to deallocate network for instance. [ 913.128374] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 15e3e0f5-1967-4f7d-b45f-954845c3dc75 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 913.128547] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 28e32097-d536-442f-bcb4-f93b64cb64e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 913.128680] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance a927e0ff-6c39-47fd-a082-88d41eb54015 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 913.128804] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c16f7920-23d4-4d77-b70f-118887cc9ff7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 913.128924] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c43ca674-06b8-4b5d-a709-2df095b509f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 913.129052] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 0d0d1503-5522-4c0d-9096-2f25ed0fd7df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 913.129218] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 65601835-8d30-46b8-b928-b3912d058c6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 913.129293] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 9369b20b-7027-47de-8495-a503ddfb69bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 913.129462] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ecec531e-41d9-47e3-b447-bc658edaea69 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 913.129625] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c68ee659-716d-47cc-a6a1-d4c18fa5664f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 913.146136] env[69784]: INFO nova.scheduler.client.report [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Deleted allocations for instance 15e3e0f5-1967-4f7d-b45f-954845c3dc75 [ 913.155522] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 16edc743-b24c-4a20-9046-f5d519bd7e9a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 913.167369] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7683084c-5a7d-4f81-aa59-4a8aad8dec8a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Lock "15e3e0f5-1967-4f7d-b45f-954845c3dc75" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 251.060s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 913.167697] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "15e3e0f5-1967-4f7d-b45f-954845c3dc75" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 240.267s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 913.167844] env[69784]: INFO nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] During sync_power_state the instance has a pending task (spawning). Skip. 
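The ERROR block above shows the CopyVirtualDisk task failing: oslo.vmware polls the vCenter task inside wait_for_task() and, when the task ends in an error state, raises a translated VimFaultException ("A specified parameter was not correct: fileType", faults ['InvalidArgument']), which _build_and_run_instance catches and turns into a reschedule. Below is a minimal, hedged sketch of that error-surface pattern; the 'session' (an oslo_vmware.api.VMwareAPISession) and the already-invoked copy task are assumptions for illustration, and this is not Nova's actual code path.

    # Hedged sketch only: how a VimFaultException like the one logged above
    # surfaces from oslo.vmware task polling. 'session' and 'vmdk_copy_task'
    # are assumed to exist already.
    from oslo_vmware import exceptions as vexc

    def wait_for_copy(session, vmdk_copy_task):
        try:
            # wait_for_task() polls the vCenter task until it completes and
            # raises a translated fault if the task errors out.
            return session.wait_for_task(vmdk_copy_task)
        except vexc.VimFaultException as err:
            # err.fault_list carries the vSphere fault names, e.g. ['InvalidArgument'].
            print("disk copy failed: %s (faults: %s)" % (err, err.fault_list))
            raise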
[ 913.168032] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "15e3e0f5-1967-4f7d-b45f-954845c3dc75" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 913.168646] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e7c68c8d-e210-410f-a3e3-7ba94688990a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Lock "15e3e0f5-1967-4f7d-b45f-954845c3dc75" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 52.557s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 913.168889] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e7c68c8d-e210-410f-a3e3-7ba94688990a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Acquiring lock "15e3e0f5-1967-4f7d-b45f-954845c3dc75-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 913.169643] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e7c68c8d-e210-410f-a3e3-7ba94688990a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Lock "15e3e0f5-1967-4f7d-b45f-954845c3dc75-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 913.169881] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e7c68c8d-e210-410f-a3e3-7ba94688990a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Lock "15e3e0f5-1967-4f7d-b45f-954845c3dc75-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 913.171904] env[69784]: INFO nova.compute.manager [None req-e7c68c8d-e210-410f-a3e3-7ba94688990a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Terminating instance [ 913.173615] env[69784]: DEBUG nova.compute.manager [None req-e7c68c8d-e210-410f-a3e3-7ba94688990a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Start destroying the instance on the hypervisor. 
{{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 913.173859] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-e7c68c8d-e210-410f-a3e3-7ba94688990a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 913.175119] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2e60decd-ef78-4af1-83ed-a60661f4701a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.177603] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ecb67581-1c86-4bff-a063-8433329914c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 913.186551] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c53fce0c-ec97-49ad-b90d-c949be0c785f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.199329] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 1fd4b343-4e5e-42c3-aa86-33c9866c5f6e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 913.205112] env[69784]: DEBUG nova.compute.manager [None req-fbdb0b93-6220-4a8b-ab04-31954bb66143 tempest-VolumesAssistedSnapshotsTest-2036838310 tempest-VolumesAssistedSnapshotsTest-2036838310-project-member] [instance: f27a066f-307a-4213-b4d3-a861068f4867] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 913.219546] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-e7c68c8d-e210-410f-a3e3-7ba94688990a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 15e3e0f5-1967-4f7d-b45f-954845c3dc75 could not be found. [ 913.219800] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-e7c68c8d-e210-410f-a3e3-7ba94688990a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 913.220093] env[69784]: INFO nova.compute.manager [None req-e7c68c8d-e210-410f-a3e3-7ba94688990a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Took 0.05 seconds to destroy the instance on the hypervisor. 
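The inventory dict logged for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 (VCPU total 48 with allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, DISK_GB total 400) determines how much capacity Placement will hand out. A small illustrative calculation using the standard Placement rule capacity = (total - reserved) * allocation_ratio; the helper below is an illustration only, not Nova or Placement code.

    # Hedged sketch: effective capacity per resource class from the inventory
    # data the report client logs ("Inventory has not changed for provider ...").
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    def capacity(inv):
        # Placement treats (total - reserved) * allocation_ratio as the amount
        # it may allocate for each resource class.
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}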
[ 913.220414] env[69784]: DEBUG oslo.service.loopingcall [None req-e7c68c8d-e210-410f-a3e3-7ba94688990a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 913.221124] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance d6a9f02a-3d21-4adc-b598-b0586d7e54de has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 913.222414] env[69784]: DEBUG nova.compute.manager [-] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 913.222515] env[69784]: DEBUG nova.network.neutron [-] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 913.235128] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c17163a5-f93c-4899-943a-59f3862eee07 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 913.236378] env[69784]: DEBUG nova.compute.manager [None req-fbdb0b93-6220-4a8b-ab04-31954bb66143 tempest-VolumesAssistedSnapshotsTest-2036838310 tempest-VolumesAssistedSnapshotsTest-2036838310-project-member] [instance: f27a066f-307a-4213-b4d3-a861068f4867] Instance disappeared before build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 913.253675] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 88d2f936-18bb-4b40-8c72-fb6372c447c5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 913.266675] env[69784]: DEBUG nova.network.neutron [-] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.274961] env[69784]: DEBUG oslo_concurrency.lockutils [None req-fbdb0b93-6220-4a8b-ab04-31954bb66143 tempest-VolumesAssistedSnapshotsTest-2036838310 tempest-VolumesAssistedSnapshotsTest-2036838310-project-member] Lock "f27a066f-307a-4213-b4d3-a861068f4867" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 221.369s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 913.277657] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 050e4912-aa96-43d9-8d5e-6db9b4e35961 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 913.290544] env[69784]: DEBUG nova.compute.manager [None req-7e7b763a-7d4a-4835-bb8b-60603a7c722f tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 9b67f788-cf36-4bb3-bdc0-575d2a2178ca] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 913.297060] env[69784]: INFO nova.compute.manager [-] [instance: 15e3e0f5-1967-4f7d-b45f-954845c3dc75] Took 0.07 seconds to deallocate network for instance. [ 913.313386] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance f0fd2350-cdac-4782-80c6-97c022f26711 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 913.330189] env[69784]: DEBUG nova.compute.manager [None req-7e7b763a-7d4a-4835-bb8b-60603a7c722f tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 9b67f788-cf36-4bb3-bdc0-575d2a2178ca] Instance disappeared before build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 913.331727] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance d185e137-119c-4611-9749-00fe4a6bb2c8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 913.347322] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance cf2e6054-e94c-48ab-9142-34475d17d2f1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 913.365505] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7e7b763a-7d4a-4835-bb8b-60603a7c722f tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Lock "9b67f788-cf36-4bb3-bdc0-575d2a2178ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 220.188s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 913.366196] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c1f8b9b1-de4f-47b2-8465-81b7c7139b38 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 913.380952] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance b5ebb553-9d00-4791-b563-8ddaa325dc88 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 913.393843] env[69784]: DEBUG nova.compute.manager [None req-6db1a888-bb9e-42b7-9c11-279531c3ecb6 tempest-InstanceActionsTestJSON-95801263 tempest-InstanceActionsTestJSON-95801263-project-member] [instance: 2a8374bd-e901-4b1e-b9ee-e599bd8efed5] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 913.412773] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 913.422665] env[69784]: DEBUG nova.compute.manager [None req-6db1a888-bb9e-42b7-9c11-279531c3ecb6 tempest-InstanceActionsTestJSON-95801263 tempest-InstanceActionsTestJSON-95801263-project-member] [instance: 2a8374bd-e901-4b1e-b9ee-e599bd8efed5] Instance disappeared before build. 
{{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 913.427290] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c23e7041-ca02-4047-84d5-84b62f36b37f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 913.444966] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 7a640743-734e-4dc0-a965-0a71dddfb918 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 913.450134] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e7c68c8d-e210-410f-a3e3-7ba94688990a tempest-ImagesOneServerNegativeTestJSON-758722556 tempest-ImagesOneServerNegativeTestJSON-758722556-project-member] Lock "15e3e0f5-1967-4f7d-b45f-954845c3dc75" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.281s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 913.456658] env[69784]: DEBUG oslo_concurrency.lockutils [None req-6db1a888-bb9e-42b7-9c11-279531c3ecb6 tempest-InstanceActionsTestJSON-95801263 tempest-InstanceActionsTestJSON-95801263-project-member] Lock "2a8374bd-e901-4b1e-b9ee-e599bd8efed5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 218.162s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 913.459263] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 288af650-a19b-4ce5-baea-013dcaa6e908 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 913.470833] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance d48c4130-2875-4704-bbb5-75c17fd497c8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 913.483296] env[69784]: DEBUG nova.compute.manager [None req-4250ddc5-448a-4e5e-ae90-c16604170e45 tempest-ServersV294TestFqdnHostnames-388572063 tempest-ServersV294TestFqdnHostnames-388572063-project-member] [instance: 14daabdf-7839-4dfb-bbc9-f4ea90e8db8e] Starting instance... 
{{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 913.493734] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance f763fb3a-9db8-457a-a713-9aa2abf9e440 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 913.508686] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 6d0fb95f-194e-49ca-8992-e2cec634a5bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 913.518805] env[69784]: DEBUG nova.compute.manager [None req-4250ddc5-448a-4e5e-ae90-c16604170e45 tempest-ServersV294TestFqdnHostnames-388572063 tempest-ServersV294TestFqdnHostnames-388572063-project-member] [instance: 14daabdf-7839-4dfb-bbc9-f4ea90e8db8e] Instance disappeared before build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 913.524967] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 47224bae-e259-4517-a379-0561e3812057 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 913.536542] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 82981529-56c3-43c0-8d33-c2f2b0875bfc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 913.536542] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 913.536542] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 913.545820] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4250ddc5-448a-4e5e-ae90-c16604170e45 tempest-ServersV294TestFqdnHostnames-388572063 tempest-ServersV294TestFqdnHostnames-388572063-project-member] Lock "14daabdf-7839-4dfb-bbc9-f4ea90e8db8e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 217.563s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 913.558065] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Refreshing inventories for resource provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 913.561025] env[69784]: DEBUG nova.compute.manager [None req-69eddb1b-c388-4ff0-ad9e-c3383319658a tempest-TenantUsagesTestJSON-1166063019 tempest-TenantUsagesTestJSON-1166063019-project-member] [instance: 471a1543-ff95-4010-84f9-206730770b1f] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 913.583821] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Updating ProviderTree inventory for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 913.584029] env[69784]: DEBUG nova.compute.provider_tree [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Updating inventory in ProviderTree for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 913.589200] env[69784]: DEBUG nova.compute.manager [None req-69eddb1b-c388-4ff0-ad9e-c3383319658a tempest-TenantUsagesTestJSON-1166063019 tempest-TenantUsagesTestJSON-1166063019-project-member] [instance: 471a1543-ff95-4010-84f9-206730770b1f] Instance disappeared before build. 
{{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 913.598666] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Refreshing aggregate associations for resource provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3, aggregates: None {{(pid=69784) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 913.615281] env[69784]: DEBUG oslo_concurrency.lockutils [None req-69eddb1b-c388-4ff0-ad9e-c3383319658a tempest-TenantUsagesTestJSON-1166063019 tempest-TenantUsagesTestJSON-1166063019-project-member] Lock "471a1543-ff95-4010-84f9-206730770b1f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 212.754s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 913.619343] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Refreshing trait associations for resource provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69784) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 913.628086] env[69784]: DEBUG nova.compute.manager [None req-eb701734-9a98-4311-b106-287e2db4749a tempest-ImagesTestJSON-82243885 tempest-ImagesTestJSON-82243885-project-member] [instance: dc623e72-8e80-4aaa-8a0c-363481141255] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 913.657202] env[69784]: DEBUG nova.compute.manager [None req-eb701734-9a98-4311-b106-287e2db4749a tempest-ImagesTestJSON-82243885 tempest-ImagesTestJSON-82243885-project-member] [instance: dc623e72-8e80-4aaa-8a0c-363481141255] Instance disappeared before build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 913.685711] env[69784]: DEBUG oslo_concurrency.lockutils [None req-eb701734-9a98-4311-b106-287e2db4749a tempest-ImagesTestJSON-82243885 tempest-ImagesTestJSON-82243885-project-member] Lock "dc623e72-8e80-4aaa-8a0c-363481141255" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 212.409s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 913.699163] env[69784]: DEBUG nova.compute.manager [None req-16dc8a4a-aa0b-4526-b0a6-a366ab0d0f43 tempest-ServerDiagnosticsV248Test-402064256 tempest-ServerDiagnosticsV248Test-402064256-project-member] [instance: 6e54dcb2-6760-4403-8711-75bda2e053a3] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 913.727673] env[69784]: DEBUG nova.compute.manager [None req-16dc8a4a-aa0b-4526-b0a6-a366ab0d0f43 tempest-ServerDiagnosticsV248Test-402064256 tempest-ServerDiagnosticsV248Test-402064256-project-member] [instance: 6e54dcb2-6760-4403-8711-75bda2e053a3] Instance disappeared before build. 
{{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 913.755921] env[69784]: DEBUG oslo_concurrency.lockutils [None req-16dc8a4a-aa0b-4526-b0a6-a366ab0d0f43 tempest-ServerDiagnosticsV248Test-402064256 tempest-ServerDiagnosticsV248Test-402064256-project-member] Lock "6e54dcb2-6760-4403-8711-75bda2e053a3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 211.930s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 913.778078] env[69784]: DEBUG nova.compute.manager [None req-8d59fb99-922f-4714-955f-8645325f2347 tempest-ServersAdminTestJSON-1315016571 tempest-ServersAdminTestJSON-1315016571-project-member] [instance: c4255a94-f498-4498-a3a3-2867b0f12936] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 913.807381] env[69784]: DEBUG nova.compute.manager [None req-8d59fb99-922f-4714-955f-8645325f2347 tempest-ServersAdminTestJSON-1315016571 tempest-ServersAdminTestJSON-1315016571-project-member] [instance: c4255a94-f498-4498-a3a3-2867b0f12936] Instance disappeared before build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 913.832978] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8d59fb99-922f-4714-955f-8645325f2347 tempest-ServersAdminTestJSON-1315016571 tempest-ServersAdminTestJSON-1315016571-project-member] Lock "c4255a94-f498-4498-a3a3-2867b0f12936" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 206.012s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 913.849983] env[69784]: DEBUG nova.compute.manager [None req-86636d14-3fa8-449f-b6c5-82710a9f174f tempest-ServersAdminTestJSON-1315016571 tempest-ServersAdminTestJSON-1315016571-project-member] [instance: dc7d9de6-30f9-4f58-9142-6d36e42a3b99] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 913.888026] env[69784]: DEBUG nova.compute.manager [None req-86636d14-3fa8-449f-b6c5-82710a9f174f tempest-ServersAdminTestJSON-1315016571 tempest-ServersAdminTestJSON-1315016571-project-member] [instance: dc7d9de6-30f9-4f58-9142-6d36e42a3b99] Instance disappeared before build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 913.922376] env[69784]: DEBUG oslo_concurrency.lockutils [None req-86636d14-3fa8-449f-b6c5-82710a9f174f tempest-ServersAdminTestJSON-1315016571 tempest-ServersAdminTestJSON-1315016571-project-member] Lock "dc7d9de6-30f9-4f58-9142-6d36e42a3b99" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 205.044s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 913.938538] env[69784]: DEBUG nova.compute.manager [None req-e59dfa0a-9bbe-4d96-8a77-c80e2340595b tempest-ServersTestBootFromVolume-1201909614 tempest-ServersTestBootFromVolume-1201909614-project-member] [instance: 54f60014-0a24-45c5-ab1e-14ab2b3fd8b6] Starting instance... 
{{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 913.969027] env[69784]: DEBUG nova.compute.manager [None req-e59dfa0a-9bbe-4d96-8a77-c80e2340595b tempest-ServersTestBootFromVolume-1201909614 tempest-ServersTestBootFromVolume-1201909614-project-member] [instance: 54f60014-0a24-45c5-ab1e-14ab2b3fd8b6] Instance disappeared before build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 914.001254] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e59dfa0a-9bbe-4d96-8a77-c80e2340595b tempest-ServersTestBootFromVolume-1201909614 tempest-ServersTestBootFromVolume-1201909614-project-member] Lock "54f60014-0a24-45c5-ab1e-14ab2b3fd8b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.876s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 914.015631] env[69784]: DEBUG nova.compute.manager [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 914.094343] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 914.150915] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a3d9690-5f20-48e1-a132-c5be0e5da69a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.159604] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf8bbe1-26ab-4b6f-b20c-ef24e800cfe3 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.193240] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d14fac9-e190-4d53-9467-4ed59206e3ad {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.200746] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-686f0f6e-70d7-4635-8e2e-977ef22ae56a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.215380] env[69784]: DEBUG nova.compute.provider_tree [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 914.227039] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 914.243925] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69784) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 914.244157] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.319s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 914.244441] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.150s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 914.246336] env[69784]: INFO nova.compute.claims [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 914.833612] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14c8f261-5d3f-4224-a806-cda260fee438 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.841673] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54a6661a-f254-4614-b073-a0a0923d5342 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.872912] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b325008-d63c-4987-8df7-c32479ab724f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.880724] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-620250b7-f2c6-404a-804e-33becb749a67 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.895987] env[69784]: DEBUG nova.compute.provider_tree [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 914.905315] env[69784]: DEBUG nova.scheduler.client.report [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 914.923221] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.678s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 914.923598] env[69784]: DEBUG nova.compute.manager [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Start building networks asynchronously for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 914.971225] env[69784]: DEBUG nova.compute.utils [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 914.972571] env[69784]: DEBUG nova.compute.manager [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 914.972748] env[69784]: DEBUG nova.network.neutron [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 914.984966] env[69784]: DEBUG nova.compute.manager [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 915.072617] env[69784]: DEBUG nova.compute.manager [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Start spawning the instance on the hypervisor. 
{{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 915.100376] env[69784]: DEBUG nova.virt.hardware [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 915.100376] env[69784]: DEBUG nova.virt.hardware [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 915.100376] env[69784]: DEBUG nova.virt.hardware [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 915.100642] env[69784]: DEBUG nova.virt.hardware [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 915.100869] env[69784]: DEBUG nova.virt.hardware [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 915.101210] env[69784]: DEBUG nova.virt.hardware [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 915.101572] env[69784]: DEBUG nova.virt.hardware [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 915.101872] env[69784]: DEBUG nova.virt.hardware [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 915.102207] 
env[69784]: DEBUG nova.virt.hardware [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 915.104969] env[69784]: DEBUG nova.virt.hardware [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 915.104969] env[69784]: DEBUG nova.virt.hardware [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 915.104969] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7579e6a5-8ea4-41c9-b17e-89034167d48f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.112443] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5b3a59c-671d-4da8-b506-627aab4e0ca4 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.150352] env[69784]: DEBUG nova.policy [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e93ffac839d44b3a8ad01731da2ea21e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7b2c0b031793408088474a718c48e512', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 916.249448] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 916.249826] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Starting heal instance info cache {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 916.250054] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Rebuilding the list of instances to heal {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 916.273559] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Skipping network cache update for instance because it is Building. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 916.273731] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 916.275127] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 916.275695] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 916.276870] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 916.277061] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 916.277240] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 916.277370] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 916.277495] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 916.277617] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 916.277742] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Didn't find any instances for network info cache update. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 916.278338] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 916.657746] env[69784]: DEBUG nova.network.neutron [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Successfully created port: 5bda6171-9750-4f5b-8f1c-7a2cf9ddc451 {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 918.464307] env[69784]: DEBUG nova.network.neutron [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Successfully updated port: 5bda6171-9750-4f5b-8f1c-7a2cf9ddc451 {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 918.479341] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Acquiring lock "refresh_cache-16edc743-b24c-4a20-9046-f5d519bd7e9a" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 918.479491] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Acquired lock "refresh_cache-16edc743-b24c-4a20-9046-f5d519bd7e9a" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 918.479643] env[69784]: DEBUG nova.network.neutron [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 918.542625] env[69784]: DEBUG nova.network.neutron [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Instance cache missing network info. 
{{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 918.678307] env[69784]: DEBUG nova.compute.manager [req-d6891a79-0f08-4b70-b5f1-90dfb8608dbd req-711d51e5-a546-4430-93d6-67b5de299121 service nova] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Received event network-vif-plugged-5bda6171-9750-4f5b-8f1c-7a2cf9ddc451 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 918.678522] env[69784]: DEBUG oslo_concurrency.lockutils [req-d6891a79-0f08-4b70-b5f1-90dfb8608dbd req-711d51e5-a546-4430-93d6-67b5de299121 service nova] Acquiring lock "16edc743-b24c-4a20-9046-f5d519bd7e9a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 918.678738] env[69784]: DEBUG oslo_concurrency.lockutils [req-d6891a79-0f08-4b70-b5f1-90dfb8608dbd req-711d51e5-a546-4430-93d6-67b5de299121 service nova] Lock "16edc743-b24c-4a20-9046-f5d519bd7e9a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 918.678917] env[69784]: DEBUG oslo_concurrency.lockutils [req-d6891a79-0f08-4b70-b5f1-90dfb8608dbd req-711d51e5-a546-4430-93d6-67b5de299121 service nova] Lock "16edc743-b24c-4a20-9046-f5d519bd7e9a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 918.679204] env[69784]: DEBUG nova.compute.manager [req-d6891a79-0f08-4b70-b5f1-90dfb8608dbd req-711d51e5-a546-4430-93d6-67b5de299121 service nova] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] No waiting events found dispatching network-vif-plugged-5bda6171-9750-4f5b-8f1c-7a2cf9ddc451 {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 918.679273] env[69784]: WARNING nova.compute.manager [req-d6891a79-0f08-4b70-b5f1-90dfb8608dbd req-711d51e5-a546-4430-93d6-67b5de299121 service nova] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Received unexpected event network-vif-plugged-5bda6171-9750-4f5b-8f1c-7a2cf9ddc451 for instance with vm_state building and task_state spawning. 
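The lockutils records in this stretch of the log follow a fixed pattern: `Lock "<name>" acquired by "<target>" :: waited N.NNNs` on acquire and `Lock "<name>" "released" by "<target>" :: held N.NNNs` on release. As a minimal, hypothetical helper (standard library only; the regexes are assumptions based solely on the messages shown above, not on any oslo_concurrency API), one can total the wait and hold times per lock to spot contention such as the 1.319s `compute_resources` hold earlier in this section:

```python
import re
import sys
from collections import defaultdict

# Patterns assumed from the oslo_concurrency.lockutils messages visible in this log.
ACQUIRED = re.compile(r'Lock "(?P<name>[^"]+)" acquired by "[^"]+" :: waited (?P<sec>[0-9.]+)s')
RELEASED = re.compile(r'Lock "(?P<name>[^"]+)" "released" by "[^"]+" :: held (?P<sec>[0-9.]+)s')

def lock_timings(lines):
    """Accumulate total waited/held seconds per lock name from log lines."""
    waited = defaultdict(float)
    held = defaultdict(float)
    for line in lines:
        m = ACQUIRED.search(line)
        if m:
            waited[m.group("name")] += float(m.group("sec"))
        m = RELEASED.search(line)
        if m:
            held[m.group("name")] += float(m.group("sec"))
    return waited, held

if __name__ == "__main__":
    # Usage sketch: pipe the nova-compute log into this script.
    waited, held = lock_timings(sys.stdin)
    for name in sorted(set(waited) | set(held)):
        print(f"{name}: waited {waited[name]:.3f}s, held {held[name]:.3f}s")
```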
[ 918.785393] env[69784]: DEBUG nova.network.neutron [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Updating instance_info_cache with network_info: [{"id": "5bda6171-9750-4f5b-8f1c-7a2cf9ddc451", "address": "fa:16:3e:f1:d9:64", "network": {"id": "3a2b52bd-8fef-4904-ac5c-e434b717bd37", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1442210437-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7b2c0b031793408088474a718c48e512", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5bda6171-97", "ovs_interfaceid": "5bda6171-9750-4f5b-8f1c-7a2cf9ddc451", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.798651] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Releasing lock "refresh_cache-16edc743-b24c-4a20-9046-f5d519bd7e9a" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 918.800096] env[69784]: DEBUG nova.compute.manager [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Instance network_info: |[{"id": "5bda6171-9750-4f5b-8f1c-7a2cf9ddc451", "address": "fa:16:3e:f1:d9:64", "network": {"id": "3a2b52bd-8fef-4904-ac5c-e434b717bd37", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1442210437-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7b2c0b031793408088474a718c48e512", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5bda6171-97", "ovs_interfaceid": "5bda6171-9750-4f5b-8f1c-7a2cf9ddc451", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 918.800299] env[69784]: DEBUG 
nova.virt.vmwareapi.vmops [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:d9:64', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5bda6171-9750-4f5b-8f1c-7a2cf9ddc451', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 918.811409] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Creating folder: Project (7b2c0b031793408088474a718c48e512). Parent ref: group-v692547. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 918.813033] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6168053a-f72b-46ba-a925-4589bc8f7fce {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.824651] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Created folder: Project (7b2c0b031793408088474a718c48e512) in parent group-v692547. [ 918.824875] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Creating folder: Instances. Parent ref: group-v692594. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 918.825101] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-72f8ad8b-b5ac-4a31-8d56-be0802ee5f4d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.835291] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Created folder: Instances in parent group-v692594. [ 918.835291] env[69784]: DEBUG oslo.service.loopingcall [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 918.836773] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 918.836773] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9d8f7f09-5078-44e7-83f5-24d381c17b83 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.863555] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 918.863555] env[69784]: value = "task-3467078" [ 918.863555] env[69784]: _type = "Task" [ 918.863555] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.873983] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467078, 'name': CreateVM_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.375314] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467078, 'name': CreateVM_Task, 'duration_secs': 0.298711} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.375937] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 919.376229] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 919.376569] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 919.376852] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 919.377148] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45220a64-cec2-4118-a9f9-4d53c1b54147 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.382864] env[69784]: DEBUG oslo_vmware.api [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Waiting for the task: (returnval){ [ 919.382864] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]522ae053-b98f-3879-314d-37af119b51da" [ 919.382864] env[69784]: _type = "Task" [ 919.382864] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.395249] env[69784]: DEBUG oslo_vmware.api [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]522ae053-b98f-3879-314d-37af119b51da, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.898421] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 919.898703] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 919.898903] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 921.102413] env[69784]: DEBUG nova.compute.manager [req-05c7ac12-f948-4688-b21d-19d65ffbb2ef req-cbb76e4f-64c9-461e-9b1d-2abb21a04a39 service nova] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Received event network-changed-5bda6171-9750-4f5b-8f1c-7a2cf9ddc451 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 921.102686] env[69784]: DEBUG nova.compute.manager [req-05c7ac12-f948-4688-b21d-19d65ffbb2ef req-cbb76e4f-64c9-461e-9b1d-2abb21a04a39 service nova] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Refreshing instance network info cache due to event network-changed-5bda6171-9750-4f5b-8f1c-7a2cf9ddc451. {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 921.102818] env[69784]: DEBUG oslo_concurrency.lockutils [req-05c7ac12-f948-4688-b21d-19d65ffbb2ef req-cbb76e4f-64c9-461e-9b1d-2abb21a04a39 service nova] Acquiring lock "refresh_cache-16edc743-b24c-4a20-9046-f5d519bd7e9a" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 921.102956] env[69784]: DEBUG oslo_concurrency.lockutils [req-05c7ac12-f948-4688-b21d-19d65ffbb2ef req-cbb76e4f-64c9-461e-9b1d-2abb21a04a39 service nova] Acquired lock "refresh_cache-16edc743-b24c-4a20-9046-f5d519bd7e9a" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 921.103137] env[69784]: DEBUG nova.network.neutron [req-05c7ac12-f948-4688-b21d-19d65ffbb2ef req-cbb76e4f-64c9-461e-9b1d-2abb21a04a39 service nova] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Refreshing network info cache for port 5bda6171-9750-4f5b-8f1c-7a2cf9ddc451 {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 921.961223] env[69784]: DEBUG nova.network.neutron [req-05c7ac12-f948-4688-b21d-19d65ffbb2ef req-cbb76e4f-64c9-461e-9b1d-2abb21a04a39 service nova] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Updated VIF entry in instance network info cache for port 5bda6171-9750-4f5b-8f1c-7a2cf9ddc451. 
{{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 921.961772] env[69784]: DEBUG nova.network.neutron [req-05c7ac12-f948-4688-b21d-19d65ffbb2ef req-cbb76e4f-64c9-461e-9b1d-2abb21a04a39 service nova] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Updating instance_info_cache with network_info: [{"id": "5bda6171-9750-4f5b-8f1c-7a2cf9ddc451", "address": "fa:16:3e:f1:d9:64", "network": {"id": "3a2b52bd-8fef-4904-ac5c-e434b717bd37", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1442210437-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7b2c0b031793408088474a718c48e512", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5bda6171-97", "ovs_interfaceid": "5bda6171-9750-4f5b-8f1c-7a2cf9ddc451", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.980037] env[69784]: DEBUG oslo_concurrency.lockutils [req-05c7ac12-f948-4688-b21d-19d65ffbb2ef req-cbb76e4f-64c9-461e-9b1d-2abb21a04a39 service nova] Releasing lock "refresh_cache-16edc743-b24c-4a20-9046-f5d519bd7e9a" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 922.276763] env[69784]: DEBUG oslo_concurrency.lockutils [None req-de3e6b67-53ae-4eae-afe5-2a6cfd5f7e07 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Acquiring lock "16edc743-b24c-4a20-9046-f5d519bd7e9a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 923.636019] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Acquiring lock "6109a6f5-11ea-4983-b271-f84aa859d6cd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 923.636352] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Lock "6109a6f5-11ea-4983-b271-f84aa859d6cd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 927.027026] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1639275f-86ce-4421-96f5-8f6c90d79571 tempest-InstanceActionsV221TestJSON-1929562794 tempest-InstanceActionsV221TestJSON-1929562794-project-member] 
Acquiring lock "34f889c0-3105-49d3-a2f0-9cf250ab3c4b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 927.027026] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1639275f-86ce-4421-96f5-8f6c90d79571 tempest-InstanceActionsV221TestJSON-1929562794 tempest-InstanceActionsV221TestJSON-1929562794-project-member] Lock "34f889c0-3105-49d3-a2f0-9cf250ab3c4b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 932.680789] env[69784]: DEBUG oslo_concurrency.lockutils [None req-154224b9-759e-4e2f-a153-09c5b57850bc tempest-SecurityGroupsTestJSON-1889373630 tempest-SecurityGroupsTestJSON-1889373630-project-member] Acquiring lock "322c8dbc-6c6f-4343-ba39-8301c886210c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 932.681110] env[69784]: DEBUG oslo_concurrency.lockutils [None req-154224b9-759e-4e2f-a153-09c5b57850bc tempest-SecurityGroupsTestJSON-1889373630 tempest-SecurityGroupsTestJSON-1889373630-project-member] Lock "322c8dbc-6c6f-4343-ba39-8301c886210c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 934.104732] env[69784]: DEBUG oslo_concurrency.lockutils [None req-fc063c46-822a-4452-ba02-bfab27bdf9f0 tempest-AttachInterfacesUnderV243Test-814149851 tempest-AttachInterfacesUnderV243Test-814149851-project-member] Acquiring lock "f8902bba-d976-47c2-b034-a9438b6c467b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 934.104732] env[69784]: DEBUG oslo_concurrency.lockutils [None req-fc063c46-822a-4452-ba02-bfab27bdf9f0 tempest-AttachInterfacesUnderV243Test-814149851 tempest-AttachInterfacesUnderV243Test-814149851-project-member] Lock "f8902bba-d976-47c2-b034-a9438b6c467b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 934.318055] env[69784]: DEBUG oslo_concurrency.lockutils [None req-317be39b-60ee-4d93-9138-bb2863693bf2 tempest-MultipleCreateTestJSON-1744311742 tempest-MultipleCreateTestJSON-1744311742-project-member] Acquiring lock "4091d39a-80ff-43f9-8194-e995838ecb0d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 934.318263] env[69784]: DEBUG oslo_concurrency.lockutils [None req-317be39b-60ee-4d93-9138-bb2863693bf2 tempest-MultipleCreateTestJSON-1744311742 tempest-MultipleCreateTestJSON-1744311742-project-member] Lock "4091d39a-80ff-43f9-8194-e995838ecb0d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 934.346251] env[69784]: DEBUG oslo_concurrency.lockutils [None req-317be39b-60ee-4d93-9138-bb2863693bf2 tempest-MultipleCreateTestJSON-1744311742 tempest-MultipleCreateTestJSON-1744311742-project-member] Acquiring lock "098647d8-eb8e-4494-8458-857e152e0ff8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 934.346494] env[69784]: DEBUG oslo_concurrency.lockutils [None req-317be39b-60ee-4d93-9138-bb2863693bf2 tempest-MultipleCreateTestJSON-1744311742 tempest-MultipleCreateTestJSON-1744311742-project-member] Lock "098647d8-eb8e-4494-8458-857e152e0ff8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 936.931645] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1b74c4f0-879a-41dd-9de4-78c434f4e000 tempest-AttachInterfacesTestJSON-1468351673 tempest-AttachInterfacesTestJSON-1468351673-project-member] Acquiring lock "210a8b8c-13f0-4cb9-9d92-2b018291011b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 936.931645] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1b74c4f0-879a-41dd-9de4-78c434f4e000 tempest-AttachInterfacesTestJSON-1468351673 tempest-AttachInterfacesTestJSON-1468351673-project-member] Lock "210a8b8c-13f0-4cb9-9d92-2b018291011b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 948.609341] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2c0f3237-8f24-482b-b118-b21024976172 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Acquiring lock "4f0df17c-a95e-424c-8444-8cea31190bae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 948.609657] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2c0f3237-8f24-482b-b118-b21024976172 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Lock "4f0df17c-a95e-424c-8444-8cea31190bae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 961.262801] env[69784]: WARNING oslo_vmware.rw_handles [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 961.262801] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 961.262801] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 961.262801] env[69784]: ERROR oslo_vmware.rw_handles 
self._conn.getresponse() [ 961.262801] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 961.262801] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 961.262801] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 961.262801] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 961.262801] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 961.262801] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 961.262801] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 961.262801] env[69784]: ERROR oslo_vmware.rw_handles [ 961.263632] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/feb7b70c-5e9e-4d09-9370-ca793c70c8f5/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 961.265054] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 961.265297] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Copying Virtual Disk [datastore1] vmware_temp/feb7b70c-5e9e-4d09-9370-ca793c70c8f5/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] vmware_temp/feb7b70c-5e9e-4d09-9370-ca793c70c8f5/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 961.265582] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f3bbcb80-a8bf-4917-9cff-7610779cd201 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.273897] env[69784]: DEBUG oslo_vmware.api [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Waiting for the task: (returnval){ [ 961.273897] env[69784]: value = "task-3467079" [ 961.273897] env[69784]: _type = "Task" [ 961.273897] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.281303] env[69784]: DEBUG oslo_vmware.api [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Task: {'id': task-3467079, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.784773] env[69784]: DEBUG oslo_vmware.exceptions [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Fault InvalidArgument not matched. {{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 961.785034] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 961.785584] env[69784]: ERROR nova.compute.manager [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 961.785584] env[69784]: Faults: ['InvalidArgument'] [ 961.785584] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Traceback (most recent call last): [ 961.785584] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 961.785584] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] yield resources [ 961.785584] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 961.785584] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] self.driver.spawn(context, instance, image_meta, [ 961.785584] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 961.785584] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 961.785584] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 961.785584] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] self._fetch_image_if_missing(context, vi) [ 961.785584] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 961.785968] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] image_cache(vi, tmp_image_ds_loc) [ 961.785968] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 961.785968] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] vm_util.copy_virtual_disk( [ 961.785968] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in 
copy_virtual_disk [ 961.785968] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] session._wait_for_task(vmdk_copy_task) [ 961.785968] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 961.785968] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] return self.wait_for_task(task_ref) [ 961.785968] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 961.785968] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] return evt.wait() [ 961.785968] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 961.785968] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] result = hub.switch() [ 961.785968] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 961.785968] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] return self.greenlet.switch() [ 961.786379] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 961.786379] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] self.f(*self.args, **self.kw) [ 961.786379] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 961.786379] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] raise exceptions.translate_fault(task_info.error) [ 961.786379] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 961.786379] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Faults: ['InvalidArgument'] [ 961.786379] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] [ 961.786379] env[69784]: INFO nova.compute.manager [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Terminating instance [ 961.787809] env[69784]: DEBUG oslo_concurrency.lockutils [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 961.787809] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Creating directory with path [datastore1] devstack-image-cache_base 
{{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 961.787975] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ca8d709e-8bad-405a-8737-3d520ab3ed5c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.790127] env[69784]: DEBUG nova.compute.manager [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 961.790323] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 961.791062] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6292988-2802-4c41-87be-cdbdeb0f990c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.798233] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 961.799266] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-028a5484-57a9-4575-a337-726cd339f02e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.800710] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 961.800878] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 961.801560] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bedf677d-67bd-4d97-8769-b8cb3a8e956a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.806261] env[69784]: DEBUG oslo_vmware.api [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Waiting for the task: (returnval){ [ 961.806261] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]5284a32a-9ac5-6de6-52f1-e164785dc5ee" [ 961.806261] env[69784]: _type = "Task" [ 961.806261] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.814886] env[69784]: DEBUG oslo_vmware.api [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]5284a32a-9ac5-6de6-52f1-e164785dc5ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.866762] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 961.867009] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 961.867195] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Deleting the datastore file [datastore1] 28e32097-d536-442f-bcb4-f93b64cb64e4 {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 961.867464] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3492e77f-c119-4ac2-bf8c-fd566e741d34 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.874088] env[69784]: DEBUG oslo_vmware.api [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Waiting for the task: (returnval){ [ 961.874088] env[69784]: value = "task-3467081" [ 961.874088] env[69784]: _type = "Task" [ 961.874088] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.882254] env[69784]: DEBUG oslo_vmware.api [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Task: {'id': task-3467081, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.317043] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 962.317043] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Creating directory with path [datastore1] vmware_temp/5bcf356f-dc88-4c2a-9d2c-086db6772d3d/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 962.317381] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-48115e1f-738d-4a48-bf17-63b6a3008292 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.328576] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Created directory with path [datastore1] vmware_temp/5bcf356f-dc88-4c2a-9d2c-086db6772d3d/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 962.328774] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Fetch image to [datastore1] vmware_temp/5bcf356f-dc88-4c2a-9d2c-086db6772d3d/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 962.328942] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/5bcf356f-dc88-4c2a-9d2c-086db6772d3d/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 962.329742] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbe5d2be-2f41-478d-a912-0aac259c44e2 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.336242] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb2a4ea4-2431-4439-96df-10acf390b5bd {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.345466] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ae29160-d176-487c-88e3-a97d66d7b83d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.381888] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17af1fc4-88d7-41cd-9379-166abfd16f25 {{(pid=69784) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.389056] env[69784]: DEBUG oslo_vmware.api [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Task: {'id': task-3467081, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07594} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.390594] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 962.390833] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 962.390986] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 962.391146] env[69784]: INFO nova.compute.manager [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 962.393465] env[69784]: DEBUG nova.compute.claims [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 962.393639] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 962.393850] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 962.397109] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4be358d7-e4ad-4336-a493-3502337c5131 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.427429] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 962.531585] env[69784]: DEBUG oslo_vmware.rw_handles [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5bcf356f-dc88-4c2a-9d2c-086db6772d3d/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 962.591112] env[69784]: DEBUG oslo_vmware.rw_handles [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Completed reading data from the image iterator. {{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 962.591310] env[69784]: DEBUG oslo_vmware.rw_handles [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5bcf356f-dc88-4c2a-9d2c-086db6772d3d/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 962.811317] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b88a6fe-762b-4d8a-8966-b7a888c7415e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.818779] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a139f696-96ff-4973-9fb7-428ea1e07ca6 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.847752] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-008006a7-dd1b-43ec-94b5-66b691f7b75d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.855198] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a0bc52e-7511-4329-ac13-034f03ab9bfe {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.868877] env[69784]: DEBUG nova.compute.provider_tree [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 962.877483] env[69784]: DEBUG nova.scheduler.client.report [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 962.891736] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.498s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 962.892505] env[69784]: ERROR nova.compute.manager [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 962.892505] env[69784]: Faults: ['InvalidArgument'] [ 962.892505] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Traceback (most recent call last): [ 962.892505] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 962.892505] env[69784]: ERROR nova.compute.manager 
[instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] self.driver.spawn(context, instance, image_meta, [ 962.892505] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 962.892505] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 962.892505] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 962.892505] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] self._fetch_image_if_missing(context, vi) [ 962.892505] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 962.892505] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] image_cache(vi, tmp_image_ds_loc) [ 962.892505] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 962.892873] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] vm_util.copy_virtual_disk( [ 962.892873] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 962.892873] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] session._wait_for_task(vmdk_copy_task) [ 962.892873] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 962.892873] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] return self.wait_for_task(task_ref) [ 962.892873] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 962.892873] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] return evt.wait() [ 962.892873] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 962.892873] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] result = hub.switch() [ 962.892873] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 962.892873] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] return self.greenlet.switch() [ 962.892873] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 962.892873] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] self.f(*self.args, **self.kw) [ 962.893234] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 962.893234] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] raise exceptions.translate_fault(task_info.error) [ 962.893234] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 962.893234] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Faults: ['InvalidArgument'] [ 962.893234] env[69784]: ERROR nova.compute.manager [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] [ 962.893372] env[69784]: DEBUG nova.compute.utils [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 962.895157] env[69784]: DEBUG nova.compute.manager [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Build of instance 28e32097-d536-442f-bcb4-f93b64cb64e4 was re-scheduled: A specified parameter was not correct: fileType [ 962.895157] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 962.895157] env[69784]: DEBUG nova.compute.manager [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 962.895157] env[69784]: DEBUG nova.compute.manager [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 962.895442] env[69784]: DEBUG nova.compute.manager [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 962.895442] env[69784]: DEBUG nova.network.neutron [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 963.434183] env[69784]: DEBUG nova.network.neutron [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.443414] env[69784]: INFO nova.compute.manager [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Took 0.55 seconds to deallocate network for instance. [ 963.545379] env[69784]: INFO nova.scheduler.client.report [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Deleted allocations for instance 28e32097-d536-442f-bcb4-f93b64cb64e4 [ 963.565029] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f37425cf-3e8d-4f20-9c0d-65fb1c64d0e1 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "28e32097-d536-442f-bcb4-f93b64cb64e4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 300.520s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 963.566192] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "28e32097-d536-442f-bcb4-f93b64cb64e4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 290.665s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 963.566266] env[69784]: INFO nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 963.566420] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "28e32097-d536-442f-bcb4-f93b64cb64e4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 963.567018] env[69784]: DEBUG oslo_concurrency.lockutils [None req-288e1eb3-e846-4a67-a066-4db8d0e4e4ca tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "28e32097-d536-442f-bcb4-f93b64cb64e4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 102.031s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 963.567322] env[69784]: DEBUG oslo_concurrency.lockutils [None req-288e1eb3-e846-4a67-a066-4db8d0e4e4ca tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquiring lock "28e32097-d536-442f-bcb4-f93b64cb64e4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 963.567420] env[69784]: DEBUG oslo_concurrency.lockutils [None req-288e1eb3-e846-4a67-a066-4db8d0e4e4ca tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "28e32097-d536-442f-bcb4-f93b64cb64e4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 963.567579] env[69784]: DEBUG oslo_concurrency.lockutils [None req-288e1eb3-e846-4a67-a066-4db8d0e4e4ca tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "28e32097-d536-442f-bcb4-f93b64cb64e4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 963.569349] env[69784]: INFO nova.compute.manager [None req-288e1eb3-e846-4a67-a066-4db8d0e4e4ca tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Terminating instance [ 963.571009] env[69784]: DEBUG nova.compute.manager [None req-288e1eb3-e846-4a67-a066-4db8d0e4e4ca tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Start destroying the instance on the hypervisor. 
{{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 963.571212] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-288e1eb3-e846-4a67-a066-4db8d0e4e4ca tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 963.571869] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1397cd66-3280-48f3-ba19-d598d65f7c50 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.581858] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f846bd5-6975-437c-93be-3aafd2b02a97 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.592278] env[69784]: DEBUG nova.compute.manager [None req-697d64b1-aac9-45ce-b259-3ea401a64f2f tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: ecb67581-1c86-4bff-a063-8433329914c7] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 963.613537] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-288e1eb3-e846-4a67-a066-4db8d0e4e4ca tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 28e32097-d536-442f-bcb4-f93b64cb64e4 could not be found. [ 963.613739] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-288e1eb3-e846-4a67-a066-4db8d0e4e4ca tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 963.613914] env[69784]: INFO nova.compute.manager [None req-288e1eb3-e846-4a67-a066-4db8d0e4e4ca tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Took 0.04 seconds to destroy the instance on the hypervisor. [ 963.614171] env[69784]: DEBUG oslo.service.loopingcall [None req-288e1eb3-e846-4a67-a066-4db8d0e4e4ca tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 963.614392] env[69784]: DEBUG nova.compute.manager [-] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 963.614692] env[69784]: DEBUG nova.network.neutron [-] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 963.617182] env[69784]: DEBUG nova.compute.manager [None req-697d64b1-aac9-45ce-b259-3ea401a64f2f tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: ecb67581-1c86-4bff-a063-8433329914c7] Instance disappeared before build. 
{{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 963.640386] env[69784]: DEBUG oslo_concurrency.lockutils [None req-697d64b1-aac9-45ce-b259-3ea401a64f2f tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Lock "ecb67581-1c86-4bff-a063-8433329914c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 243.239s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 963.641867] env[69784]: DEBUG nova.network.neutron [-] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.649287] env[69784]: INFO nova.compute.manager [-] [instance: 28e32097-d536-442f-bcb4-f93b64cb64e4] Took 0.03 seconds to deallocate network for instance. [ 963.651267] env[69784]: DEBUG nova.compute.manager [None req-575ea7ba-a31c-4a97-9a96-d556dc8e6904 tempest-ImagesNegativeTestJSON-791461209 tempest-ImagesNegativeTestJSON-791461209-project-member] [instance: 1fd4b343-4e5e-42c3-aa86-33c9866c5f6e] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 963.680020] env[69784]: DEBUG nova.compute.manager [None req-575ea7ba-a31c-4a97-9a96-d556dc8e6904 tempest-ImagesNegativeTestJSON-791461209 tempest-ImagesNegativeTestJSON-791461209-project-member] [instance: 1fd4b343-4e5e-42c3-aa86-33c9866c5f6e] Instance disappeared before build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 963.698716] env[69784]: DEBUG oslo_concurrency.lockutils [None req-575ea7ba-a31c-4a97-9a96-d556dc8e6904 tempest-ImagesNegativeTestJSON-791461209 tempest-ImagesNegativeTestJSON-791461209-project-member] Lock "1fd4b343-4e5e-42c3-aa86-33c9866c5f6e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 242.534s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 963.709660] env[69784]: DEBUG nova.compute.manager [None req-40a47e74-c244-4a14-9872-de656504d968 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: d6a9f02a-3d21-4adc-b598-b0586d7e54de] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 963.738650] env[69784]: DEBUG nova.compute.manager [None req-40a47e74-c244-4a14-9872-de656504d968 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: d6a9f02a-3d21-4adc-b598-b0586d7e54de] Instance disappeared before build. 
{{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 963.760628] env[69784]: DEBUG oslo_concurrency.lockutils [None req-40a47e74-c244-4a14-9872-de656504d968 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Lock "d6a9f02a-3d21-4adc-b598-b0586d7e54de" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 241.327s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 963.773912] env[69784]: DEBUG nova.compute.manager [None req-c1f4ead0-b262-4697-9124-e25f77a07e87 tempest-ImagesOneServerTestJSON-1424307921 tempest-ImagesOneServerTestJSON-1424307921-project-member] [instance: c17163a5-f93c-4899-943a-59f3862eee07] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 963.776161] env[69784]: DEBUG oslo_concurrency.lockutils [None req-288e1eb3-e846-4a67-a066-4db8d0e4e4ca tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "28e32097-d536-442f-bcb4-f93b64cb64e4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.209s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 963.796812] env[69784]: DEBUG nova.compute.manager [None req-c1f4ead0-b262-4697-9124-e25f77a07e87 tempest-ImagesOneServerTestJSON-1424307921 tempest-ImagesOneServerTestJSON-1424307921-project-member] [instance: c17163a5-f93c-4899-943a-59f3862eee07] Instance disappeared before build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 963.815717] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c1f4ead0-b262-4697-9124-e25f77a07e87 tempest-ImagesOneServerTestJSON-1424307921 tempest-ImagesOneServerTestJSON-1424307921-project-member] Lock "c17163a5-f93c-4899-943a-59f3862eee07" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 241.262s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 963.827673] env[69784]: DEBUG nova.compute.manager [None req-d488b7f5-41c4-4f0d-9286-b9f4e39a5134 tempest-FloatingIPsAssociationNegativeTestJSON-139635208 tempest-FloatingIPsAssociationNegativeTestJSON-139635208-project-member] [instance: 88d2f936-18bb-4b40-8c72-fb6372c447c5] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 963.849850] env[69784]: DEBUG nova.compute.manager [None req-d488b7f5-41c4-4f0d-9286-b9f4e39a5134 tempest-FloatingIPsAssociationNegativeTestJSON-139635208 tempest-FloatingIPsAssociationNegativeTestJSON-139635208-project-member] [instance: 88d2f936-18bb-4b40-8c72-fb6372c447c5] Instance disappeared before build. 
{{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 963.868621] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d488b7f5-41c4-4f0d-9286-b9f4e39a5134 tempest-FloatingIPsAssociationNegativeTestJSON-139635208 tempest-FloatingIPsAssociationNegativeTestJSON-139635208-project-member] Lock "88d2f936-18bb-4b40-8c72-fb6372c447c5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 240.762s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 963.878935] env[69784]: DEBUG nova.compute.manager [None req-9827a9be-e086-422a-bc2e-dea7c596f2ea tempest-ServerShowV247Test-1917301733 tempest-ServerShowV247Test-1917301733-project-member] [instance: 050e4912-aa96-43d9-8d5e-6db9b4e35961] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 963.904255] env[69784]: DEBUG nova.compute.manager [None req-9827a9be-e086-422a-bc2e-dea7c596f2ea tempest-ServerShowV247Test-1917301733 tempest-ServerShowV247Test-1917301733-project-member] [instance: 050e4912-aa96-43d9-8d5e-6db9b4e35961] Instance disappeared before build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 963.926525] env[69784]: DEBUG oslo_concurrency.lockutils [None req-9827a9be-e086-422a-bc2e-dea7c596f2ea tempest-ServerShowV247Test-1917301733 tempest-ServerShowV247Test-1917301733-project-member] Lock "050e4912-aa96-43d9-8d5e-6db9b4e35961" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 237.404s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 963.936162] env[69784]: DEBUG nova.compute.manager [None req-24514a99-1f20-4fff-8d78-b90d61887ec8 tempest-ServerShowV247Test-1917301733 tempest-ServerShowV247Test-1917301733-project-member] [instance: f0fd2350-cdac-4782-80c6-97c022f26711] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 963.959561] env[69784]: DEBUG nova.compute.manager [None req-24514a99-1f20-4fff-8d78-b90d61887ec8 tempest-ServerShowV247Test-1917301733 tempest-ServerShowV247Test-1917301733-project-member] [instance: f0fd2350-cdac-4782-80c6-97c022f26711] Instance disappeared before build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 963.981118] env[69784]: DEBUG oslo_concurrency.lockutils [None req-24514a99-1f20-4fff-8d78-b90d61887ec8 tempest-ServerShowV247Test-1917301733 tempest-ServerShowV247Test-1917301733-project-member] Lock "f0fd2350-cdac-4782-80c6-97c022f26711" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 236.487s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 963.991611] env[69784]: DEBUG nova.compute.manager [None req-65fdea04-ac28-4259-ab66-ef5a943fa4d8 tempest-MultipleCreateTestJSON-1744311742 tempest-MultipleCreateTestJSON-1744311742-project-member] [instance: d185e137-119c-4611-9749-00fe4a6bb2c8] Starting instance... 
{{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 964.014712] env[69784]: DEBUG nova.compute.manager [None req-65fdea04-ac28-4259-ab66-ef5a943fa4d8 tempest-MultipleCreateTestJSON-1744311742 tempest-MultipleCreateTestJSON-1744311742-project-member] [instance: d185e137-119c-4611-9749-00fe4a6bb2c8] Instance disappeared before build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 964.036296] env[69784]: DEBUG oslo_concurrency.lockutils [None req-65fdea04-ac28-4259-ab66-ef5a943fa4d8 tempest-MultipleCreateTestJSON-1744311742 tempest-MultipleCreateTestJSON-1744311742-project-member] Lock "d185e137-119c-4611-9749-00fe4a6bb2c8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 228.228s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 964.049538] env[69784]: DEBUG nova.compute.manager [None req-65fdea04-ac28-4259-ab66-ef5a943fa4d8 tempest-MultipleCreateTestJSON-1744311742 tempest-MultipleCreateTestJSON-1744311742-project-member] [instance: cf2e6054-e94c-48ab-9142-34475d17d2f1] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 964.071998] env[69784]: DEBUG nova.compute.manager [None req-65fdea04-ac28-4259-ab66-ef5a943fa4d8 tempest-MultipleCreateTestJSON-1744311742 tempest-MultipleCreateTestJSON-1744311742-project-member] [instance: cf2e6054-e94c-48ab-9142-34475d17d2f1] Instance disappeared before build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 964.097536] env[69784]: DEBUG oslo_concurrency.lockutils [None req-65fdea04-ac28-4259-ab66-ef5a943fa4d8 tempest-MultipleCreateTestJSON-1744311742 tempest-MultipleCreateTestJSON-1744311742-project-member] Lock "cf2e6054-e94c-48ab-9142-34475d17d2f1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 228.259s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 964.107805] env[69784]: DEBUG nova.compute.manager [None req-171457b6-e761-4b59-99df-1aecbac20367 tempest-AttachInterfacesTestJSON-1468351673 tempest-AttachInterfacesTestJSON-1468351673-project-member] [instance: c1f8b9b1-de4f-47b2-8465-81b7c7139b38] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 964.133056] env[69784]: DEBUG nova.compute.manager [None req-171457b6-e761-4b59-99df-1aecbac20367 tempest-AttachInterfacesTestJSON-1468351673 tempest-AttachInterfacesTestJSON-1468351673-project-member] [instance: c1f8b9b1-de4f-47b2-8465-81b7c7139b38] Instance disappeared before build. 
{{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 964.154932] env[69784]: DEBUG oslo_concurrency.lockutils [None req-171457b6-e761-4b59-99df-1aecbac20367 tempest-AttachInterfacesTestJSON-1468351673 tempest-AttachInterfacesTestJSON-1468351673-project-member] Lock "c1f8b9b1-de4f-47b2-8465-81b7c7139b38" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 225.506s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 964.166205] env[69784]: DEBUG nova.compute.manager [None req-112ae074-c27e-46b5-9669-606161082e62 tempest-ServerActionsV293TestJSON-1682501985 tempest-ServerActionsV293TestJSON-1682501985-project-member] [instance: b5ebb553-9d00-4791-b563-8ddaa325dc88] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 964.192619] env[69784]: DEBUG nova.compute.manager [None req-112ae074-c27e-46b5-9669-606161082e62 tempest-ServerActionsV293TestJSON-1682501985 tempest-ServerActionsV293TestJSON-1682501985-project-member] [instance: b5ebb553-9d00-4791-b563-8ddaa325dc88] Instance disappeared before build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 964.213131] env[69784]: DEBUG oslo_concurrency.lockutils [None req-112ae074-c27e-46b5-9669-606161082e62 tempest-ServerActionsV293TestJSON-1682501985 tempest-ServerActionsV293TestJSON-1682501985-project-member] Lock "b5ebb553-9d00-4791-b563-8ddaa325dc88" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 219.086s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 964.225943] env[69784]: DEBUG nova.compute.manager [None req-27c1342e-37fc-4ecc-bc7e-df0b017b672d tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] [instance: f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6] Starting instance... 
{{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 964.283342] env[69784]: DEBUG oslo_concurrency.lockutils [None req-27c1342e-37fc-4ecc-bc7e-df0b017b672d tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 964.283640] env[69784]: DEBUG oslo_concurrency.lockutils [None req-27c1342e-37fc-4ecc-bc7e-df0b017b672d tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 964.285164] env[69784]: INFO nova.compute.claims [None req-27c1342e-37fc-4ecc-bc7e-df0b017b672d tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] [instance: f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 964.407971] env[69784]: DEBUG oslo_concurrency.lockutils [None req-16fb0c97-b29d-42bd-a771-d8cad67c4d96 tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] Acquiring lock "f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 964.655928] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea15d458-367c-45f1-9033-e8e72d4f5ee2 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.662564] env[69784]: DEBUG oslo_concurrency.lockutils [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquiring lock "694e2a62-5f2e-475d-9356-a66651c3e5e2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 964.662826] env[69784]: DEBUG oslo_concurrency.lockutils [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "694e2a62-5f2e-475d-9356-a66651c3e5e2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 964.667037] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be7a55f4-e5dc-4d37-aa26-a22ab3e8b951 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.697037] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1c91597-be23-46ee-a2df-53d0ae4d5beb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.704108] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b6fe542f-6a21-4f7d-847f-97d0a0abad16 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.718866] env[69784]: DEBUG nova.compute.provider_tree [None req-27c1342e-37fc-4ecc-bc7e-df0b017b672d tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 964.729065] env[69784]: DEBUG nova.scheduler.client.report [None req-27c1342e-37fc-4ecc-bc7e-df0b017b672d tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 964.744224] env[69784]: DEBUG oslo_concurrency.lockutils [None req-27c1342e-37fc-4ecc-bc7e-df0b017b672d tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.460s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 964.744963] env[69784]: DEBUG nova.compute.manager [None req-27c1342e-37fc-4ecc-bc7e-df0b017b672d tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] [instance: f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6] Start building networks asynchronously for instance. 
{{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 964.776168] env[69784]: DEBUG nova.compute.claims [None req-27c1342e-37fc-4ecc-bc7e-df0b017b672d tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] [instance: f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 964.776392] env[69784]: DEBUG oslo_concurrency.lockutils [None req-27c1342e-37fc-4ecc-bc7e-df0b017b672d tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 964.776708] env[69784]: DEBUG oslo_concurrency.lockutils [None req-27c1342e-37fc-4ecc-bc7e-df0b017b672d tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 965.115792] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-290600f4-96d1-4c88-867c-2c6a4b885bb8 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.123100] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef815332-693b-462f-824d-3d9cbea6590d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.152606] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8406303c-fb50-4330-bb62-2c5aa67bbd6a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.159522] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcb46b2b-8544-4f48-95fe-8584c4898cdf {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.172338] env[69784]: DEBUG nova.compute.provider_tree [None req-27c1342e-37fc-4ecc-bc7e-df0b017b672d tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 965.182757] env[69784]: DEBUG nova.scheduler.client.report [None req-27c1342e-37fc-4ecc-bc7e-df0b017b672d tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 965.196638] env[69784]: DEBUG oslo_concurrency.lockutils [None 
req-27c1342e-37fc-4ecc-bc7e-df0b017b672d tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.420s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 965.197439] env[69784]: DEBUG nova.compute.utils [None req-27c1342e-37fc-4ecc-bc7e-df0b017b672d tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] [instance: f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6] Conflict updating instance f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6. Expected: {'task_state': [None]}. Actual: {'task_state': 'deleting'} {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 965.198829] env[69784]: DEBUG nova.compute.manager [None req-27c1342e-37fc-4ecc-bc7e-df0b017b672d tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] [instance: f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6] Instance disappeared during build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 965.198998] env[69784]: DEBUG nova.compute.manager [None req-27c1342e-37fc-4ecc-bc7e-df0b017b672d tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] [instance: f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 965.199249] env[69784]: DEBUG oslo_concurrency.lockutils [None req-27c1342e-37fc-4ecc-bc7e-df0b017b672d tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] Acquiring lock "refresh_cache-f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 965.199395] env[69784]: DEBUG oslo_concurrency.lockutils [None req-27c1342e-37fc-4ecc-bc7e-df0b017b672d tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] Acquired lock "refresh_cache-f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 965.199576] env[69784]: DEBUG nova.network.neutron [None req-27c1342e-37fc-4ecc-bc7e-df0b017b672d tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] [instance: f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 965.245783] env[69784]: DEBUG nova.network.neutron [None req-27c1342e-37fc-4ecc-bc7e-df0b017b672d tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] [instance: f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6] Instance cache missing network info. 
{{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 965.333199] env[69784]: DEBUG nova.network.neutron [None req-27c1342e-37fc-4ecc-bc7e-df0b017b672d tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] [instance: f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.345797] env[69784]: DEBUG oslo_concurrency.lockutils [None req-27c1342e-37fc-4ecc-bc7e-df0b017b672d tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] Releasing lock "refresh_cache-f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 965.346038] env[69784]: DEBUG nova.compute.manager [None req-27c1342e-37fc-4ecc-bc7e-df0b017b672d tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 965.346223] env[69784]: DEBUG nova.compute.manager [None req-27c1342e-37fc-4ecc-bc7e-df0b017b672d tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] [instance: f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6] Skipping network deallocation for instance since networking was not requested. {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2259}} [ 965.418733] env[69784]: INFO nova.scheduler.client.report [None req-27c1342e-37fc-4ecc-bc7e-df0b017b672d tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] Deleted allocations for instance f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6 [ 965.419077] env[69784]: DEBUG oslo_concurrency.lockutils [None req-27c1342e-37fc-4ecc-bc7e-df0b017b672d tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] Lock "f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 196.332s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 965.420279] env[69784]: DEBUG oslo_concurrency.lockutils [None req-16fb0c97-b29d-42bd-a771-d8cad67c4d96 tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] Lock "f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 1.013s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 965.420529] env[69784]: DEBUG oslo_concurrency.lockutils [None req-16fb0c97-b29d-42bd-a771-d8cad67c4d96 tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] Acquiring lock "f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 965.420766] env[69784]: DEBUG oslo_concurrency.lockutils [None req-16fb0c97-b29d-42bd-a771-d8cad67c4d96 tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] Lock 
"f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 965.420947] env[69784]: DEBUG oslo_concurrency.lockutils [None req-16fb0c97-b29d-42bd-a771-d8cad67c4d96 tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] Lock "f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 965.422863] env[69784]: INFO nova.compute.manager [None req-16fb0c97-b29d-42bd-a771-d8cad67c4d96 tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] [instance: f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6] Terminating instance [ 965.424494] env[69784]: DEBUG oslo_concurrency.lockutils [None req-16fb0c97-b29d-42bd-a771-d8cad67c4d96 tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] Acquiring lock "refresh_cache-f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 965.424583] env[69784]: DEBUG oslo_concurrency.lockutils [None req-16fb0c97-b29d-42bd-a771-d8cad67c4d96 tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] Acquired lock "refresh_cache-f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 965.424736] env[69784]: DEBUG nova.network.neutron [None req-16fb0c97-b29d-42bd-a771-d8cad67c4d96 tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] [instance: f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 965.431842] env[69784]: DEBUG nova.compute.manager [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 965.461277] env[69784]: DEBUG nova.network.neutron [None req-16fb0c97-b29d-42bd-a771-d8cad67c4d96 tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] [instance: f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6] Instance cache missing network info. 
{{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 965.480577] env[69784]: DEBUG oslo_concurrency.lockutils [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 965.480843] env[69784]: DEBUG oslo_concurrency.lockutils [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 965.482341] env[69784]: INFO nova.compute.claims [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 965.549930] env[69784]: DEBUG nova.network.neutron [None req-16fb0c97-b29d-42bd-a771-d8cad67c4d96 tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] [instance: f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.558407] env[69784]: DEBUG oslo_concurrency.lockutils [None req-16fb0c97-b29d-42bd-a771-d8cad67c4d96 tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] Releasing lock "refresh_cache-f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 965.558922] env[69784]: DEBUG nova.compute.manager [None req-16fb0c97-b29d-42bd-a771-d8cad67c4d96 tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] [instance: f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6] Start destroying the instance on the hypervisor. 
{{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 965.559172] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-16fb0c97-b29d-42bd-a771-d8cad67c4d96 tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] [instance: f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 965.562636] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-97f836d3-c654-4607-b66e-5313bfab55b9 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.571607] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01e63cc0-6d75-4271-99d3-c0aa536350b5 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.605709] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-16fb0c97-b29d-42bd-a771-d8cad67c4d96 tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] [instance: f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6 could not be found. [ 965.605927] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-16fb0c97-b29d-42bd-a771-d8cad67c4d96 tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] [instance: f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 965.606115] env[69784]: INFO nova.compute.manager [None req-16fb0c97-b29d-42bd-a771-d8cad67c4d96 tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] [instance: f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6] Took 0.05 seconds to destroy the instance on the hypervisor. [ 965.606363] env[69784]: DEBUG oslo.service.loopingcall [None req-16fb0c97-b29d-42bd-a771-d8cad67c4d96 tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 965.606614] env[69784]: DEBUG nova.compute.manager [-] [instance: f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 965.606718] env[69784]: DEBUG nova.network.neutron [-] [instance: f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 965.624126] env[69784]: DEBUG nova.network.neutron [-] [instance: f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6] Instance cache missing network info. {{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 965.632971] env[69784]: DEBUG nova.network.neutron [-] [instance: f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.641971] env[69784]: INFO nova.compute.manager [-] [instance: f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6] Took 0.03 seconds to deallocate network for instance. 
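The recurring "Acquiring lock ... by ..." / "Lock ... acquired ... waited" / "released ... held" DEBUG lines above are emitted by oslo.concurrency's lockutils wrappers. A minimal sketch of the two usage forms follows, assuming a placeholder instance UUID; none of the names below are taken from this log, and this is not Nova's actual code.

    from oslo_concurrency import lockutils

    INSTANCE_UUID = "00000000-0000-0000-0000-000000000000"  # placeholder, not from this log

    @lockutils.synchronized(INSTANCE_UUID + "-events")
    def _clear_events():
        # Decorator form: the wrapper logs the "acquired ... waited" and
        # "released ... held" lines around this function body.
        pass

    def refresh_instance_cache():
        # Context-manager form: matches the "refresh_cache-<uuid>" acquire/release
        # pairs seen while the network info cache is rebuilt during teardown.
        with lockutils.lock("refresh_cache-" + INSTANCE_UUID):
            pass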
[ 965.731370] env[69784]: DEBUG oslo_concurrency.lockutils [None req-16fb0c97-b29d-42bd-a771-d8cad67c4d96 tempest-ServersAaction247Test-704321484 tempest-ServersAaction247Test-704321484-project-member] Lock "f73fc9bc-3704-4e18-96ab-b7c8ef5ddfc6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.311s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 965.881383] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c1da28b-7ae8-4628-b12f-48fa60e67a8f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.888125] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80811132-c891-4509-acd4-72a83b6691b1 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.919129] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f487fbc7-4648-4a8e-8c18-cc684d6654f9 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.925441] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50487488-9d61-4bbd-bcb9-ddc58fb1d2ee {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.938281] env[69784]: DEBUG nova.compute.provider_tree [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 965.952553] env[69784]: DEBUG nova.scheduler.client.report [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 965.968925] env[69784]: DEBUG oslo_concurrency.lockutils [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.488s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 965.969423] env[69784]: DEBUG nova.compute.manager [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Start building networks asynchronously for instance. 
{{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 966.003059] env[69784]: DEBUG nova.compute.utils [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 966.007743] env[69784]: DEBUG nova.compute.manager [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 966.007743] env[69784]: DEBUG nova.network.neutron [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 966.016503] env[69784]: DEBUG nova.compute.manager [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 966.081833] env[69784]: DEBUG nova.compute.manager [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Start spawning the instance on the hypervisor. 
{{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 966.104118] env[69784]: DEBUG nova.policy [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '291f8f93940040e4a112f9e27e0a02ab', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e92cdeb6b85443d9b7035a4523733e13', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 966.116770] env[69784]: DEBUG nova.virt.hardware [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 966.117067] env[69784]: DEBUG nova.virt.hardware [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 966.117266] env[69784]: DEBUG nova.virt.hardware [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 966.117466] env[69784]: DEBUG nova.virt.hardware [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 966.117615] env[69784]: DEBUG nova.virt.hardware [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 966.117764] env[69784]: DEBUG nova.virt.hardware [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 966.117971] env[69784]: DEBUG nova.virt.hardware [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 966.118145] env[69784]: DEBUG nova.virt.hardware [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 966.118588] env[69784]: DEBUG nova.virt.hardware [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 966.118588] env[69784]: DEBUG nova.virt.hardware [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 966.118694] env[69784]: DEBUG nova.virt.hardware [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 966.119821] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b33ed0-cd65-4cf2-9ecc-95d87f212aa7 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.127755] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc1abd3f-1f90-43ba-ba35-93583d3559bd {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.444128] env[69784]: DEBUG nova.network.neutron [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Successfully created port: 7fd61832-617c-4245-899b-28805b9a5599 {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 967.205955] env[69784]: DEBUG nova.network.neutron [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Successfully updated port: 7fd61832-617c-4245-899b-28805b9a5599 {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 967.218250] env[69784]: DEBUG oslo_concurrency.lockutils [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Acquiring lock "refresh_cache-c23e7041-ca02-4047-84d5-84b62f36b37f" {{(pid=69784) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 967.218250] env[69784]: DEBUG oslo_concurrency.lockutils [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Acquired lock "refresh_cache-c23e7041-ca02-4047-84d5-84b62f36b37f" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 967.218250] env[69784]: DEBUG nova.network.neutron [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 967.273209] env[69784]: DEBUG nova.network.neutron [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Instance cache missing network info. {{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 967.356231] env[69784]: DEBUG nova.compute.manager [req-2556ca94-78b8-4c52-9721-1c98216f4990 req-7e9c667f-ba12-4160-801e-b2209f2d3f64 service nova] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Received event network-vif-plugged-7fd61832-617c-4245-899b-28805b9a5599 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 967.356500] env[69784]: DEBUG oslo_concurrency.lockutils [req-2556ca94-78b8-4c52-9721-1c98216f4990 req-7e9c667f-ba12-4160-801e-b2209f2d3f64 service nova] Acquiring lock "c23e7041-ca02-4047-84d5-84b62f36b37f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 967.356760] env[69784]: DEBUG oslo_concurrency.lockutils [req-2556ca94-78b8-4c52-9721-1c98216f4990 req-7e9c667f-ba12-4160-801e-b2209f2d3f64 service nova] Lock "c23e7041-ca02-4047-84d5-84b62f36b37f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 967.356977] env[69784]: DEBUG oslo_concurrency.lockutils [req-2556ca94-78b8-4c52-9721-1c98216f4990 req-7e9c667f-ba12-4160-801e-b2209f2d3f64 service nova] Lock "c23e7041-ca02-4047-84d5-84b62f36b37f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 967.357318] env[69784]: DEBUG nova.compute.manager [req-2556ca94-78b8-4c52-9721-1c98216f4990 req-7e9c667f-ba12-4160-801e-b2209f2d3f64 service nova] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] No waiting events found dispatching network-vif-plugged-7fd61832-617c-4245-899b-28805b9a5599 {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 967.357531] env[69784]: WARNING nova.compute.manager [req-2556ca94-78b8-4c52-9721-1c98216f4990 req-7e9c667f-ba12-4160-801e-b2209f2d3f64 service nova] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Received unexpected event network-vif-plugged-7fd61832-617c-4245-899b-28805b9a5599 for instance with vm_state building and task_state spawning. 
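The "No waiting events found" and "Received unexpected event" messages above reflect a per-instance event registry: a waiter registers interest in a named event (here network-vif-plugged for the port), and the incoming Neutron notification either wakes that waiter or, if nothing was registered, is logged as unexpected. The sketch below is a deliberately simplified stand-in for that pattern built on threading primitives; it is not Nova's implementation and all names are invented.

    import threading

    class InstanceEvents(object):
        """Simplified stand-in for the per-instance event registry implied above."""

        def __init__(self):
            self._lock = threading.Lock()
            self._events = {}  # (instance_uuid, event_name) -> threading.Event

        def prepare_for_event(self, instance_uuid, event_name):
            # Analogous to taking the "<uuid>-events" lock before registering.
            with self._lock:
                waiter = threading.Event()
                self._events[(instance_uuid, event_name)] = waiter
                return waiter

        def pop_event(self, instance_uuid, event_name):
            with self._lock:
                return self._events.pop((instance_uuid, event_name), None)

    def handle_external_event(registry, instance_uuid, event_name):
        waiter = registry.pop_event(instance_uuid, event_name)
        if waiter is None:
            # Corresponds to the WARNING above: the event arrived while the
            # instance was still building/spawning and nobody was waiting.
            print("unexpected event %s for %s" % (event_name, instance_uuid))
        else:
            waiter.set()  # wakes the thread blocked on waiter.wait(timeout)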
[ 967.512565] env[69784]: DEBUG nova.network.neutron [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Updating instance_info_cache with network_info: [{"id": "7fd61832-617c-4245-899b-28805b9a5599", "address": "fa:16:3e:b1:81:47", "network": {"id": "e0a27684-7bce-47e5-aa25-b3e5b50d3019", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.61", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7b0f6ea1d8724018ae13e62fe7220317", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7fd61832-61", "ovs_interfaceid": "7fd61832-617c-4245-899b-28805b9a5599", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.523494] env[69784]: DEBUG oslo_concurrency.lockutils [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Releasing lock "refresh_cache-c23e7041-ca02-4047-84d5-84b62f36b37f" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 967.523848] env[69784]: DEBUG nova.compute.manager [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Instance network_info: |[{"id": "7fd61832-617c-4245-899b-28805b9a5599", "address": "fa:16:3e:b1:81:47", "network": {"id": "e0a27684-7bce-47e5-aa25-b3e5b50d3019", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.61", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7b0f6ea1d8724018ae13e62fe7220317", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7fd61832-61", "ovs_interfaceid": "7fd61832-617c-4245-899b-28805b9a5599", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 967.524366] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 
tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b1:81:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '685b4083-b748-41fb-a68a-273b1073fa28', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7fd61832-617c-4245-899b-28805b9a5599', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 967.533313] env[69784]: DEBUG oslo.service.loopingcall [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 967.533864] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 967.534148] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5a2402f6-c4c5-4422-8380-daad132eabfd {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.558340] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 967.558340] env[69784]: value = "task-3467082" [ 967.558340] env[69784]: _type = "Task" [ 967.558340] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.565920] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467082, 'name': CreateVM_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.067107] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467082, 'name': CreateVM_Task} progress is 99%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.567684] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467082, 'name': CreateVM_Task, 'duration_secs': 0.565863} completed successfully. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.567974] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 968.568517] env[69784]: DEBUG oslo_concurrency.lockutils [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 968.568701] env[69784]: DEBUG oslo_concurrency.lockutils [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 968.569024] env[69784]: DEBUG oslo_concurrency.lockutils [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 968.569267] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b74aaae8-b7e4-4424-ba59-4652ac58c916 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.573689] env[69784]: DEBUG oslo_vmware.api [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Waiting for the task: (returnval){ [ 968.573689] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]5227e4c1-55de-8e81-85f4-aa374fe6e7b9" [ 968.573689] env[69784]: _type = "Task" [ 968.573689] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.581456] env[69784]: DEBUG oslo_vmware.api [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]5227e4c1-55de-8e81-85f4-aa374fe6e7b9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.084110] env[69784]: DEBUG oslo_concurrency.lockutils [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 969.084368] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 969.084578] env[69784]: DEBUG oslo_concurrency.lockutils [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 969.396410] env[69784]: DEBUG nova.compute.manager [req-025004c9-26a9-46a8-9c58-49b0b96c1d72 req-0805f6e5-b2d9-4495-8bd4-1d7bbe51d3d3 service nova] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Received event network-changed-7fd61832-617c-4245-899b-28805b9a5599 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 969.396608] env[69784]: DEBUG nova.compute.manager [req-025004c9-26a9-46a8-9c58-49b0b96c1d72 req-0805f6e5-b2d9-4495-8bd4-1d7bbe51d3d3 service nova] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Refreshing instance network info cache due to event network-changed-7fd61832-617c-4245-899b-28805b9a5599. {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 969.396780] env[69784]: DEBUG oslo_concurrency.lockutils [req-025004c9-26a9-46a8-9c58-49b0b96c1d72 req-0805f6e5-b2d9-4495-8bd4-1d7bbe51d3d3 service nova] Acquiring lock "refresh_cache-c23e7041-ca02-4047-84d5-84b62f36b37f" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 969.396926] env[69784]: DEBUG oslo_concurrency.lockutils [req-025004c9-26a9-46a8-9c58-49b0b96c1d72 req-0805f6e5-b2d9-4495-8bd4-1d7bbe51d3d3 service nova] Acquired lock "refresh_cache-c23e7041-ca02-4047-84d5-84b62f36b37f" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 969.397095] env[69784]: DEBUG nova.network.neutron [req-025004c9-26a9-46a8-9c58-49b0b96c1d72 req-0805f6e5-b2d9-4495-8bd4-1d7bbe51d3d3 service nova] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Refreshing network info cache for port 7fd61832-617c-4245-899b-28805b9a5599 {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 969.684402] env[69784]: DEBUG nova.network.neutron [req-025004c9-26a9-46a8-9c58-49b0b96c1d72 req-0805f6e5-b2d9-4495-8bd4-1d7bbe51d3d3 service nova] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Updated VIF entry in instance network info cache for port 7fd61832-617c-4245-899b-28805b9a5599. 
{{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 969.684756] env[69784]: DEBUG nova.network.neutron [req-025004c9-26a9-46a8-9c58-49b0b96c1d72 req-0805f6e5-b2d9-4495-8bd4-1d7bbe51d3d3 service nova] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Updating instance_info_cache with network_info: [{"id": "7fd61832-617c-4245-899b-28805b9a5599", "address": "fa:16:3e:b1:81:47", "network": {"id": "e0a27684-7bce-47e5-aa25-b3e5b50d3019", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.61", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7b0f6ea1d8724018ae13e62fe7220317", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7fd61832-61", "ovs_interfaceid": "7fd61832-617c-4245-899b-28805b9a5599", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.695272] env[69784]: DEBUG oslo_concurrency.lockutils [req-025004c9-26a9-46a8-9c58-49b0b96c1d72 req-0805f6e5-b2d9-4495-8bd4-1d7bbe51d3d3 service nova] Releasing lock "refresh_cache-c23e7041-ca02-4047-84d5-84b62f36b37f" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 971.839583] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 971.839879] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 971.840027] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 971.840154] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 971.840302] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69784) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 974.836243] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 974.838822] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 974.839015] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 974.851598] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 974.851855] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 974.852019] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 974.852183] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69784) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 974.853341] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c93611ac-ddf7-4a3f-ba11-8c45c894d6f7 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.862082] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfffc47b-5727-4c9b-9ae1-34d6635e7566 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.877551] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-069c4694-8a22-43be-999b-f99d899c1806 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.884104] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e356d529-f9d6-4275-aa62-83a7766c35c5 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.914382] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] 
Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180836MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=69784) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 974.914618] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 974.914793] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 974.999738] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance a927e0ff-6c39-47fd-a082-88d41eb54015 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 974.999902] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c16f7920-23d4-4d77-b70f-118887cc9ff7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 975.000041] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c43ca674-06b8-4b5d-a709-2df095b509f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 975.000171] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 0d0d1503-5522-4c0d-9096-2f25ed0fd7df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 975.000623] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 65601835-8d30-46b8-b928-b3912d058c6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 975.000623] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 9369b20b-7027-47de-8495-a503ddfb69bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 975.000623] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ecec531e-41d9-47e3-b447-bc658edaea69 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 975.000838] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c68ee659-716d-47cc-a6a1-d4c18fa5664f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 975.000898] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 16edc743-b24c-4a20-9046-f5d519bd7e9a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 975.000982] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c23e7041-ca02-4047-84d5-84b62f36b37f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 975.012476] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 7a640743-734e-4dc0-a965-0a71dddfb918 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 975.026434] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 288af650-a19b-4ce5-baea-013dcaa6e908 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 975.039280] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance d48c4130-2875-4704-bbb5-75c17fd497c8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 975.050405] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance f763fb3a-9db8-457a-a713-9aa2abf9e440 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 975.060445] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 6d0fb95f-194e-49ca-8992-e2cec634a5bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 975.073567] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 47224bae-e259-4517-a379-0561e3812057 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 975.085777] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 82981529-56c3-43c0-8d33-c2f2b0875bfc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 975.097997] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 6109a6f5-11ea-4983-b271-f84aa859d6cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 975.109377] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 34f889c0-3105-49d3-a2f0-9cf250ab3c4b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 975.122565] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 322c8dbc-6c6f-4343-ba39-8301c886210c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 975.134055] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance f8902bba-d976-47c2-b034-a9438b6c467b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 975.164158] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 4091d39a-80ff-43f9-8194-e995838ecb0d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 975.175513] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 098647d8-eb8e-4494-8458-857e152e0ff8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 975.186130] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 210a8b8c-13f0-4cb9-9d92-2b018291011b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 975.197036] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 4f0df17c-a95e-424c-8444-8cea31190bae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 975.206897] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 694e2a62-5f2e-475d-9356-a66651c3e5e2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 975.207151] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 975.207299] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 975.522584] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23bf4013-d2a7-49ed-b60f-619ad35436f2 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.529901] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96689cb3-9e9d-4464-b1e6-830e3ce6eccb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.559432] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fddb56b-9944-4cfd-bdfb-5a8e3cc930c7 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.566551] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee42cb0c-65be-4c15-8f81-f076582d9fc4 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.580057] env[69784]: DEBUG nova.compute.provider_tree [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 975.588012] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 975.601542] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69784) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 975.601732] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.687s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 976.603402] env[69784]: DEBUG oslo_service.periodic_task [None 
req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 976.603723] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Starting heal instance info cache {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 976.603723] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Rebuilding the list of instances to heal {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 976.623915] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 976.624507] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 976.624507] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 976.624507] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 976.624688] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 976.624688] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 976.624794] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 976.624915] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 976.625042] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Skipping network cache update for instance because it is Building. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 976.625226] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 976.625276] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Didn't find any instances for network info cache update. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 976.839780] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 976.861668] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1008.362242] env[69784]: WARNING oslo_vmware.rw_handles [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1008.362242] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1008.362242] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1008.362242] env[69784]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1008.362242] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1008.362242] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 1008.362242] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1008.362242] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1008.362242] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1008.362242] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1008.362242] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1008.362242] env[69784]: ERROR oslo_vmware.rw_handles [ 1008.362837] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/5bcf356f-dc88-4c2a-9d2c-086db6772d3d/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1008.364619] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Caching image 
{{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1008.364890] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Copying Virtual Disk [datastore1] vmware_temp/5bcf356f-dc88-4c2a-9d2c-086db6772d3d/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] vmware_temp/5bcf356f-dc88-4c2a-9d2c-086db6772d3d/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1008.365151] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-62650840-8938-466a-a7f7-bbc2c23a20f4 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.373379] env[69784]: DEBUG oslo_vmware.api [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Waiting for the task: (returnval){ [ 1008.373379] env[69784]: value = "task-3467083" [ 1008.373379] env[69784]: _type = "Task" [ 1008.373379] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.381062] env[69784]: DEBUG oslo_vmware.api [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Task: {'id': task-3467083, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.884379] env[69784]: DEBUG oslo_vmware.exceptions [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Fault InvalidArgument not matched. 
{{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1008.884663] env[69784]: DEBUG oslo_concurrency.lockutils [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1008.885220] env[69784]: ERROR nova.compute.manager [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1008.885220] env[69784]: Faults: ['InvalidArgument'] [ 1008.885220] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Traceback (most recent call last): [ 1008.885220] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1008.885220] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] yield resources [ 1008.885220] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1008.885220] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] self.driver.spawn(context, instance, image_meta, [ 1008.885220] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1008.885220] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1008.885220] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1008.885220] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] self._fetch_image_if_missing(context, vi) [ 1008.885220] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1008.885629] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] image_cache(vi, tmp_image_ds_loc) [ 1008.885629] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1008.885629] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] vm_util.copy_virtual_disk( [ 1008.885629] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1008.885629] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] session._wait_for_task(vmdk_copy_task) [ 1008.885629] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 
157, in _wait_for_task [ 1008.885629] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] return self.wait_for_task(task_ref) [ 1008.885629] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1008.885629] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] return evt.wait() [ 1008.885629] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1008.885629] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] result = hub.switch() [ 1008.885629] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1008.885629] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] return self.greenlet.switch() [ 1008.886065] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1008.886065] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] self.f(*self.args, **self.kw) [ 1008.886065] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1008.886065] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] raise exceptions.translate_fault(task_info.error) [ 1008.886065] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1008.886065] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Faults: ['InvalidArgument'] [ 1008.886065] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] [ 1008.886065] env[69784]: INFO nova.compute.manager [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Terminating instance [ 1008.887073] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1008.887319] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1008.887952] env[69784]: DEBUG nova.compute.manager [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: 
a927e0ff-6c39-47fd-a082-88d41eb54015] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1008.888244] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1008.888392] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b61c4304-19c5-4bb0-a880-ebb78201d1dc {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.890746] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e18f5390-f48d-49e6-a454-4dc6f902c3e7 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.898568] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1008.899620] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-30401d70-daa3-4376-98ac-aa2b8e9087a6 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.901391] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1008.901391] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1008.901992] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73b8d075-ecea-4d19-8f5e-add23852b209 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.907445] env[69784]: DEBUG oslo_vmware.api [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Waiting for the task: (returnval){ [ 1008.907445] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]5241673f-f9b8-5399-ab21-e1343dd9388c" [ 1008.907445] env[69784]: _type = "Task" [ 1008.907445] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.914961] env[69784]: DEBUG oslo_vmware.api [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]5241673f-f9b8-5399-ab21-e1343dd9388c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.973540] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1008.973540] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1008.973540] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Deleting the datastore file [datastore1] a927e0ff-6c39-47fd-a082-88d41eb54015 {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1008.973824] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-986c074b-35e3-45e9-a04d-deb86db00c4f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.979710] env[69784]: DEBUG oslo_vmware.api [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Waiting for the task: (returnval){ [ 1008.979710] env[69784]: value = "task-3467085" [ 1008.979710] env[69784]: _type = "Task" [ 1008.979710] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.987406] env[69784]: DEBUG oslo_vmware.api [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Task: {'id': task-3467085, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.418517] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1009.418792] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Creating directory with path [datastore1] vmware_temp/ce84afaf-ca92-4d91-a72d-92e11b6e4a8f/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1009.419038] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5fd0f57c-3b3f-4e87-ad51-45a3e2b3f502 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.430736] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Created directory with path [datastore1] vmware_temp/ce84afaf-ca92-4d91-a72d-92e11b6e4a8f/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1009.430956] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Fetch image to [datastore1] vmware_temp/ce84afaf-ca92-4d91-a72d-92e11b6e4a8f/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1009.431152] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/ce84afaf-ca92-4d91-a72d-92e11b6e4a8f/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1009.431876] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bad7668-da1c-43fe-9f70-7284c5ba3305 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.438560] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69636125-e22a-45cd-8d3b-9ec624429d1f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.447534] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfe5a11d-4dee-40ae-a432-7753c7c74039 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.484723] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d0898ebd-27bf-41a0-bddf-9bdab573b228 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.492239] env[69784]: DEBUG oslo_vmware.api [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Task: {'id': task-3467085, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078354} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.493732] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1009.493924] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1009.494113] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1009.494290] env[69784]: INFO nova.compute.manager [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Took 0.61 seconds to destroy the instance on the hypervisor. 
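The CopyVirtualDisk_Task failure and the later DeleteDatastoreFile_Task poll above both trace the same vSphere task pattern: a task is submitted to vCenter, its info is polled ("progress is 0%"), and an error state is turned into an exception carrying the fault name (here InvalidArgument on the fileType parameter). A minimal illustrative sketch of such a poll loop, assuming only a get_task_info callable and using hypothetical names (poll_task, TaskError) that are not taken from oslo.vmware:

    import time

    class TaskError(Exception):
        """Raised when a vSphere-style task finishes in the 'error' state."""
        def __init__(self, message, faults):
            super().__init__(message)
            self.faults = faults

    def poll_task(get_task_info, interval=0.5, timeout=300):
        # get_task_info() is assumed to return an object with .state,
        # .error_message and .faults, mirroring what the log's _poll_task
        # entries report ("progress is 0%", then success or a fault).
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info.state == "success":
                return info
            if info.state == "error":
                # Comparable to "Fault InvalidArgument not matched" followed
                # by VimFaultException being raised to the caller.
                raise TaskError(info.error_message, info.faults)
            time.sleep(interval)   # task still queued/running, keep polling
        raise TimeoutError("task did not complete in time")

In the log, the InvalidArgument fault surfaces in the spawn path, which then aborts the resource claim and reschedules the build, as the subsequent entries show.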
[ 1009.496298] env[69784]: DEBUG nova.compute.claims [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1009.496473] env[69784]: DEBUG oslo_concurrency.lockutils [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1009.496715] env[69784]: DEBUG oslo_concurrency.lockutils [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1009.499945] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4525cc29-8fc7-4d6b-bf4e-d637de535408 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.521810] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1009.579019] env[69784]: DEBUG oslo_vmware.rw_handles [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ce84afaf-ca92-4d91-a72d-92e11b6e4a8f/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1009.638879] env[69784]: DEBUG oslo_vmware.rw_handles [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Completed reading data from the image iterator. {{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1009.639070] env[69784]: DEBUG oslo_vmware.rw_handles [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ce84afaf-ca92-4d91-a72d-92e11b6e4a8f/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1009.928639] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d3c8cfb-ce5a-4203-b5f8-dc1039a70861 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.936488] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c1e9c9f-8f67-43f9-8d85-818a26b1f362 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.966308] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b09266e-4a7b-4e1f-8329-3c6a45659cb5 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.974261] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef21a676-d8ac-41b6-934d-0987d57a902e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.989196] env[69784]: DEBUG nova.compute.provider_tree [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1009.997742] env[69784]: DEBUG nova.scheduler.client.report [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1010.012810] env[69784]: DEBUG oslo_concurrency.lockutils [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.516s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1010.013036] env[69784]: ERROR nova.compute.manager [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1010.013036] env[69784]: Faults: ['InvalidArgument'] [ 1010.013036] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Traceback (most recent call last): [ 1010.013036] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1010.013036] env[69784]: ERROR nova.compute.manager 
[instance: a927e0ff-6c39-47fd-a082-88d41eb54015] self.driver.spawn(context, instance, image_meta, [ 1010.013036] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1010.013036] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1010.013036] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1010.013036] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] self._fetch_image_if_missing(context, vi) [ 1010.013036] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1010.013036] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] image_cache(vi, tmp_image_ds_loc) [ 1010.013036] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1010.013418] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] vm_util.copy_virtual_disk( [ 1010.013418] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1010.013418] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] session._wait_for_task(vmdk_copy_task) [ 1010.013418] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1010.013418] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] return self.wait_for_task(task_ref) [ 1010.013418] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1010.013418] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] return evt.wait() [ 1010.013418] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1010.013418] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] result = hub.switch() [ 1010.013418] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1010.013418] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] return self.greenlet.switch() [ 1010.013418] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1010.013418] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] self.f(*self.args, **self.kw) [ 1010.013764] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1010.013764] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] raise exceptions.translate_fault(task_info.error) [ 1010.013764] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1010.013764] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Faults: ['InvalidArgument'] [ 1010.013764] env[69784]: ERROR nova.compute.manager [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] [ 1010.013764] env[69784]: DEBUG nova.compute.utils [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1010.015237] env[69784]: DEBUG nova.compute.manager [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Build of instance a927e0ff-6c39-47fd-a082-88d41eb54015 was re-scheduled: A specified parameter was not correct: fileType [ 1010.015237] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1010.015645] env[69784]: DEBUG nova.compute.manager [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1010.015818] env[69784]: DEBUG nova.compute.manager [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1010.015971] env[69784]: DEBUG nova.compute.manager [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1010.016163] env[69784]: DEBUG nova.network.neutron [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1010.347395] env[69784]: DEBUG nova.network.neutron [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.377406] env[69784]: INFO nova.compute.manager [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Took 0.36 seconds to deallocate network for instance. [ 1010.489875] env[69784]: INFO nova.scheduler.client.report [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Deleted allocations for instance a927e0ff-6c39-47fd-a082-88d41eb54015 [ 1010.512506] env[69784]: DEBUG oslo_concurrency.lockutils [None req-805bfa54-3b22-4399-bc2c-351df7778003 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Lock "a927e0ff-6c39-47fd-a082-88d41eb54015" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 347.437s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1010.513740] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "a927e0ff-6c39-47fd-a082-88d41eb54015" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 337.612s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1010.513927] env[69784]: INFO nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] During sync_power_state the instance has a pending task (spawning). Skip. 
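The _sync_power_states entries above skip the instance because its task_state is still "spawning": syncing a power state while another code path owns the instance would race with the build/terminate work that takes the same per-instance lock immediately afterwards. A rough sketch of that guard, using hypothetical names (Instance, sync_power_state) rather than Nova's actual helpers:

    from dataclasses import dataclass
    from typing import Optional

    @dataclass
    class Instance:
        uuid: str
        task_state: Optional[str]   # e.g. "spawning", "deleting", or None
        power_state: str            # state recorded in the database

    def sync_power_state(instance: Instance, driver_power_state: str) -> bool:
        """Return True if the record was updated, False if the sync was skipped."""
        if instance.task_state is not None:
            # Mirrors "During sync_power_state the instance has a pending
            # task (spawning). Skip." -- another code path owns the instance
            # right now, so do not fight it over power state.
            return False
        if instance.power_state != driver_power_state:
            instance.power_state = driver_power_state
            return True
        return False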
[ 1010.514118] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "a927e0ff-6c39-47fd-a082-88d41eb54015" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1010.514737] env[69784]: DEBUG oslo_concurrency.lockutils [None req-058c580b-f4ba-4bee-af39-270354fdb3c3 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Lock "a927e0ff-6c39-47fd-a082-88d41eb54015" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 148.253s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1010.514952] env[69784]: DEBUG oslo_concurrency.lockutils [None req-058c580b-f4ba-4bee-af39-270354fdb3c3 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Acquiring lock "a927e0ff-6c39-47fd-a082-88d41eb54015-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1010.515473] env[69784]: DEBUG oslo_concurrency.lockutils [None req-058c580b-f4ba-4bee-af39-270354fdb3c3 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Lock "a927e0ff-6c39-47fd-a082-88d41eb54015-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1010.515653] env[69784]: DEBUG oslo_concurrency.lockutils [None req-058c580b-f4ba-4bee-af39-270354fdb3c3 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Lock "a927e0ff-6c39-47fd-a082-88d41eb54015-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1010.517811] env[69784]: INFO nova.compute.manager [None req-058c580b-f4ba-4bee-af39-270354fdb3c3 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Terminating instance [ 1010.519618] env[69784]: DEBUG nova.compute.manager [None req-058c580b-f4ba-4bee-af39-270354fdb3c3 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Start destroying the instance on the hypervisor. 
{{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1010.519719] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-058c580b-f4ba-4bee-af39-270354fdb3c3 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1010.519967] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-899ba364-211a-4960-ab3d-cc4a1ab74e25 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.529447] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d54c431b-fc2d-4f44-a9c9-c38c2ed9d0c5 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.540614] env[69784]: DEBUG nova.compute.manager [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1010.563058] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-058c580b-f4ba-4bee-af39-270354fdb3c3 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a927e0ff-6c39-47fd-a082-88d41eb54015 could not be found. [ 1010.563203] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-058c580b-f4ba-4bee-af39-270354fdb3c3 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1010.563325] env[69784]: INFO nova.compute.manager [None req-058c580b-f4ba-4bee-af39-270354fdb3c3 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1010.563570] env[69784]: DEBUG oslo.service.loopingcall [None req-058c580b-f4ba-4bee-af39-270354fdb3c3 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1010.563797] env[69784]: DEBUG nova.compute.manager [-] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1010.563915] env[69784]: DEBUG nova.network.neutron [-] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1010.588765] env[69784]: DEBUG nova.network.neutron [-] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.595114] env[69784]: DEBUG oslo_concurrency.lockutils [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1010.595176] env[69784]: DEBUG oslo_concurrency.lockutils [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1010.597031] env[69784]: INFO nova.compute.claims [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1010.600255] env[69784]: INFO nova.compute.manager [-] [instance: a927e0ff-6c39-47fd-a082-88d41eb54015] Took 0.04 seconds to deallocate network for instance. 
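The terminate path above tolerates an instance that has already vanished from the hypervisor: the backend lookup fails, a warning is logged ("Instance does not exist on backend"), and cleanup such as network deallocation continues as if the destroy had succeeded. A small sketch of that idempotent-destroy shape, assuming driver and network objects with destroy/deallocate methods and using hypothetical names (destroy_instance, BackendNotFound):

    import logging

    LOG = logging.getLogger(__name__)

    class BackendNotFound(Exception):
        """Hypothetical stand-in for the driver's 'instance not found' error."""

    def destroy_instance(instance_uuid: str, driver, network) -> None:
        # Try to remove the VM from the hypervisor; if it is already gone,
        # log and keep going so the teardown stays idempotent (compare the
        # log's warning followed by "Instance destroyed" and network cleanup).
        try:
            driver.destroy(instance_uuid)
        except BackendNotFound:
            LOG.warning("Instance %s does not exist on backend; continuing cleanup",
                        instance_uuid)
        # Cleanup that must run whether or not the VM still existed.
        network.deallocate_for_instance(instance_uuid)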
[ 1010.691733] env[69784]: DEBUG oslo_concurrency.lockutils [None req-058c580b-f4ba-4bee-af39-270354fdb3c3 tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Lock "a927e0ff-6c39-47fd-a082-88d41eb54015" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.177s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1010.946541] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef344e12-696d-457f-a5ab-c50510c22cfe {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.954199] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac6b9a44-00cf-4da0-90e0-34c6a15e7ae9 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.984024] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28e52cca-08fc-4fa7-acc7-b55b8121755d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.991081] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6da952e-b235-41cc-90fd-f7b9eefa9725 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.004452] env[69784]: DEBUG nova.compute.provider_tree [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1011.013077] env[69784]: DEBUG nova.scheduler.client.report [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1011.043948] env[69784]: DEBUG oslo_concurrency.lockutils [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.449s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1011.044758] env[69784]: DEBUG nova.compute.manager [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Start building networks asynchronously for instance. 
{{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1011.081046] env[69784]: DEBUG nova.compute.utils [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1011.082413] env[69784]: DEBUG nova.compute.manager [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1011.082621] env[69784]: DEBUG nova.network.neutron [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1011.093227] env[69784]: DEBUG nova.compute.manager [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1011.163053] env[69784]: DEBUG nova.compute.manager [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Start spawning the instance on the hypervisor. 
{{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1011.197590] env[69784]: DEBUG nova.virt.hardware [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1011.197838] env[69784]: DEBUG nova.virt.hardware [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1011.197995] env[69784]: DEBUG nova.virt.hardware [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1011.198264] env[69784]: DEBUG nova.virt.hardware [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1011.198420] env[69784]: DEBUG nova.virt.hardware [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1011.198565] env[69784]: DEBUG nova.virt.hardware [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1011.198773] env[69784]: DEBUG nova.virt.hardware [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1011.198929] env[69784]: DEBUG nova.virt.hardware [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1011.199112] env[69784]: DEBUG nova.virt.hardware [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1011.199278] env[69784]: DEBUG nova.virt.hardware [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1011.199446] env[69784]: DEBUG nova.virt.hardware [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1011.200546] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eddbc15-dfe0-4cdc-93c9-b71a917a8441 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.204416] env[69784]: DEBUG nova.policy [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8379f4f7bd9e4c928fa84aaf58a5f5e5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aba165c044c2457d9d9bc0a8968f9fa8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 1011.211434] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76de1d98-d039-4ab3-9066-95dcf6c0bf8b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.826228] env[69784]: DEBUG nova.network.neutron [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Successfully created port: 25262b09-e54c-4fde-8b0b-5f89ab4ceedf {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1012.203679] env[69784]: DEBUG oslo_concurrency.lockutils [None req-ea71bd0c-9a93-4150-a2fc-717afe913bf0 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Acquiring lock "c23e7041-ca02-4047-84d5-84b62f36b37f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1012.604135] env[69784]: DEBUG nova.compute.manager [req-7679526f-bac9-40da-a8dd-13c321e04bc1 req-910b595f-eae6-4a40-b6f0-305379ae74db service nova] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Received event 
network-vif-plugged-25262b09-e54c-4fde-8b0b-5f89ab4ceedf {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1012.604532] env[69784]: DEBUG oslo_concurrency.lockutils [req-7679526f-bac9-40da-a8dd-13c321e04bc1 req-910b595f-eae6-4a40-b6f0-305379ae74db service nova] Acquiring lock "7a640743-734e-4dc0-a965-0a71dddfb918-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1012.604571] env[69784]: DEBUG oslo_concurrency.lockutils [req-7679526f-bac9-40da-a8dd-13c321e04bc1 req-910b595f-eae6-4a40-b6f0-305379ae74db service nova] Lock "7a640743-734e-4dc0-a965-0a71dddfb918-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1012.604917] env[69784]: DEBUG oslo_concurrency.lockutils [req-7679526f-bac9-40da-a8dd-13c321e04bc1 req-910b595f-eae6-4a40-b6f0-305379ae74db service nova] Lock "7a640743-734e-4dc0-a965-0a71dddfb918-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1012.605475] env[69784]: DEBUG nova.compute.manager [req-7679526f-bac9-40da-a8dd-13c321e04bc1 req-910b595f-eae6-4a40-b6f0-305379ae74db service nova] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] No waiting events found dispatching network-vif-plugged-25262b09-e54c-4fde-8b0b-5f89ab4ceedf {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1012.605675] env[69784]: WARNING nova.compute.manager [req-7679526f-bac9-40da-a8dd-13c321e04bc1 req-910b595f-eae6-4a40-b6f0-305379ae74db service nova] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Received unexpected event network-vif-plugged-25262b09-e54c-4fde-8b0b-5f89ab4ceedf for instance with vm_state building and task_state spawning. 
[ 1012.625982] env[69784]: DEBUG nova.network.neutron [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Successfully updated port: 25262b09-e54c-4fde-8b0b-5f89ab4ceedf {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1012.635645] env[69784]: DEBUG oslo_concurrency.lockutils [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Acquiring lock "refresh_cache-7a640743-734e-4dc0-a965-0a71dddfb918" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1012.635790] env[69784]: DEBUG oslo_concurrency.lockutils [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Acquired lock "refresh_cache-7a640743-734e-4dc0-a965-0a71dddfb918" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1012.636097] env[69784]: DEBUG nova.network.neutron [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1012.693666] env[69784]: DEBUG nova.network.neutron [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Instance cache missing network info. 
{{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1012.846771] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2e58f984-c062-45e2-bcc3-e1ac851fca6b tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Acquiring lock "5892a6e6-2829-48a0-877d-6c4307861a05" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1012.847088] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2e58f984-c062-45e2-bcc3-e1ac851fca6b tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Lock "5892a6e6-2829-48a0-877d-6c4307861a05" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1012.913322] env[69784]: DEBUG nova.network.neutron [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Updating instance_info_cache with network_info: [{"id": "25262b09-e54c-4fde-8b0b-5f89ab4ceedf", "address": "fa:16:3e:99:85:1e", "network": {"id": "e0a27684-7bce-47e5-aa25-b3e5b50d3019", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.177", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7b0f6ea1d8724018ae13e62fe7220317", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25262b09-e5", "ovs_interfaceid": "25262b09-e54c-4fde-8b0b-5f89ab4ceedf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.927250] env[69784]: DEBUG oslo_concurrency.lockutils [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Releasing lock "refresh_cache-7a640743-734e-4dc0-a965-0a71dddfb918" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1012.927656] env[69784]: DEBUG nova.compute.manager [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Instance network_info: |[{"id": "25262b09-e54c-4fde-8b0b-5f89ab4ceedf", "address": "fa:16:3e:99:85:1e", "network": {"id": "e0a27684-7bce-47e5-aa25-b3e5b50d3019", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": 
{}}, "ips": [{"address": "192.168.233.177", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7b0f6ea1d8724018ae13e62fe7220317", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25262b09-e5", "ovs_interfaceid": "25262b09-e54c-4fde-8b0b-5f89ab4ceedf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1012.928067] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:85:1e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '685b4083-b748-41fb-a68a-273b1073fa28', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '25262b09-e54c-4fde-8b0b-5f89ab4ceedf', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1012.935444] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Creating folder: Project (aba165c044c2457d9d9bc0a8968f9fa8). Parent ref: group-v692547. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1012.935982] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d0fe5d82-c612-4107-8476-70b74b23405c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.946912] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Created folder: Project (aba165c044c2457d9d9bc0a8968f9fa8) in parent group-v692547. [ 1012.947150] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Creating folder: Instances. Parent ref: group-v692598. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1012.947392] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-89e5cbce-4e5a-4ffc-bf42-69cbcf42d79d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.956857] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Created folder: Instances in parent group-v692598. 
[ 1012.957105] env[69784]: DEBUG oslo.service.loopingcall [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1012.957326] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1012.957492] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6443d0b8-97b1-4593-b989-2c9d6a648c28 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.977334] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1012.977334] env[69784]: value = "task-3467088" [ 1012.977334] env[69784]: _type = "Task" [ 1012.977334] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.984916] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467088, 'name': CreateVM_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.486945] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467088, 'name': CreateVM_Task, 'duration_secs': 0.295204} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.487135] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1013.487791] env[69784]: DEBUG oslo_concurrency.lockutils [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1013.487956] env[69784]: DEBUG oslo_concurrency.lockutils [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1013.488300] env[69784]: DEBUG oslo_concurrency.lockutils [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1013.488543] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee12cf75-5fa2-4095-8155-8a22b5b27e2e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.492763] env[69784]: DEBUG oslo_vmware.api [None req-79030d59-63e4-45ed-919b-28b3a5937515 
tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Waiting for the task: (returnval){ [ 1013.492763] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52274581-1b26-dd2c-e6d1-52a74da4fcb5" [ 1013.492763] env[69784]: _type = "Task" [ 1013.492763] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.499824] env[69784]: DEBUG oslo_vmware.api [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52274581-1b26-dd2c-e6d1-52a74da4fcb5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.005162] env[69784]: DEBUG oslo_concurrency.lockutils [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1014.005162] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1014.005162] env[69784]: DEBUG oslo_concurrency.lockutils [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1014.631471] env[69784]: DEBUG nova.compute.manager [req-408ac135-a99a-43f8-9b5d-be3285f062f8 req-f2ee499a-fb92-4746-9517-5ac0cede726e service nova] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Received event network-changed-25262b09-e54c-4fde-8b0b-5f89ab4ceedf {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1014.631602] env[69784]: DEBUG nova.compute.manager [req-408ac135-a99a-43f8-9b5d-be3285f062f8 req-f2ee499a-fb92-4746-9517-5ac0cede726e service nova] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Refreshing instance network info cache due to event network-changed-25262b09-e54c-4fde-8b0b-5f89ab4ceedf. 
{{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1014.631820] env[69784]: DEBUG oslo_concurrency.lockutils [req-408ac135-a99a-43f8-9b5d-be3285f062f8 req-f2ee499a-fb92-4746-9517-5ac0cede726e service nova] Acquiring lock "refresh_cache-7a640743-734e-4dc0-a965-0a71dddfb918" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1014.631995] env[69784]: DEBUG oslo_concurrency.lockutils [req-408ac135-a99a-43f8-9b5d-be3285f062f8 req-f2ee499a-fb92-4746-9517-5ac0cede726e service nova] Acquired lock "refresh_cache-7a640743-734e-4dc0-a965-0a71dddfb918" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1014.632306] env[69784]: DEBUG nova.network.neutron [req-408ac135-a99a-43f8-9b5d-be3285f062f8 req-f2ee499a-fb92-4746-9517-5ac0cede726e service nova] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Refreshing network info cache for port 25262b09-e54c-4fde-8b0b-5f89ab4ceedf {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1014.939316] env[69784]: DEBUG nova.network.neutron [req-408ac135-a99a-43f8-9b5d-be3285f062f8 req-f2ee499a-fb92-4746-9517-5ac0cede726e service nova] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Updated VIF entry in instance network info cache for port 25262b09-e54c-4fde-8b0b-5f89ab4ceedf. {{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1014.939684] env[69784]: DEBUG nova.network.neutron [req-408ac135-a99a-43f8-9b5d-be3285f062f8 req-f2ee499a-fb92-4746-9517-5ac0cede726e service nova] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Updating instance_info_cache with network_info: [{"id": "25262b09-e54c-4fde-8b0b-5f89ab4ceedf", "address": "fa:16:3e:99:85:1e", "network": {"id": "e0a27684-7bce-47e5-aa25-b3e5b50d3019", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.177", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7b0f6ea1d8724018ae13e62fe7220317", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25262b09-e5", "ovs_interfaceid": "25262b09-e54c-4fde-8b0b-5f89ab4ceedf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1014.951693] env[69784]: DEBUG oslo_concurrency.lockutils [req-408ac135-a99a-43f8-9b5d-be3285f062f8 req-f2ee499a-fb92-4746-9517-5ac0cede726e service nova] Releasing lock "refresh_cache-7a640743-734e-4dc0-a965-0a71dddfb918" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1031.840772] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69784) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1031.841087] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1031.841352] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69784) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1032.840684] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1033.840194] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1035.840241] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1035.840513] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Starting heal instance info cache {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1035.840551] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Rebuilding the list of instances to heal {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1035.879530] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1035.879530] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1035.879530] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1035.879530] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1035.879530] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Skipping network cache update for instance because it is Building. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1035.879789] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1035.879789] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1035.879789] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1035.879789] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1035.879789] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1035.879974] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Didn't find any instances for network info cache update. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1035.879974] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1036.840594] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1036.840912] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1036.853437] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1036.853672] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1036.853840] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1036.853994] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69784) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1036.855602] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78aa4833-9931-4d43-b4a5-14e1a488b8d3 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.864321] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-353163fa-3518-48a0-8a7f-86282023e27f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.878490] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dd7d6d5-8661-42cb-a88a-ff9064cdb3e8 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.884861] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa670d5f-f9bd-48c1-a92d-cac9f9470353 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.915249] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 
None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180957MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=69784) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1036.915418] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1036.915606] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1036.988751] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c16f7920-23d4-4d77-b70f-118887cc9ff7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1036.988911] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c43ca674-06b8-4b5d-a709-2df095b509f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1036.989056] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 0d0d1503-5522-4c0d-9096-2f25ed0fd7df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1036.989186] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 65601835-8d30-46b8-b928-b3912d058c6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1036.989309] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 9369b20b-7027-47de-8495-a503ddfb69bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1036.989426] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ecec531e-41d9-47e3-b447-bc658edaea69 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1036.989544] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c68ee659-716d-47cc-a6a1-d4c18fa5664f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1036.989704] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 16edc743-b24c-4a20-9046-f5d519bd7e9a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1036.989832] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c23e7041-ca02-4047-84d5-84b62f36b37f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1036.989951] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 7a640743-734e-4dc0-a965-0a71dddfb918 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1037.001251] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 288af650-a19b-4ce5-baea-013dcaa6e908 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1037.011370] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance d48c4130-2875-4704-bbb5-75c17fd497c8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1037.020705] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance f763fb3a-9db8-457a-a713-9aa2abf9e440 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1037.030579] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 6d0fb95f-194e-49ca-8992-e2cec634a5bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1037.039946] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 47224bae-e259-4517-a379-0561e3812057 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1037.050744] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 82981529-56c3-43c0-8d33-c2f2b0875bfc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1037.060485] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 6109a6f5-11ea-4983-b271-f84aa859d6cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1037.069691] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 34f889c0-3105-49d3-a2f0-9cf250ab3c4b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1037.078828] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 322c8dbc-6c6f-4343-ba39-8301c886210c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1037.089023] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance f8902bba-d976-47c2-b034-a9438b6c467b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1037.099766] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 4091d39a-80ff-43f9-8194-e995838ecb0d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1037.109491] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 098647d8-eb8e-4494-8458-857e152e0ff8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1037.119330] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 210a8b8c-13f0-4cb9-9d92-2b018291011b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1037.128278] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 4f0df17c-a95e-424c-8444-8cea31190bae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1037.137430] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 694e2a62-5f2e-475d-9356-a66651c3e5e2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1037.146652] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 5892a6e6-2829-48a0-877d-6c4307861a05 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1037.146891] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1037.147051] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1037.454054] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6e09ba4-d71e-42ef-b5fa-bafc24109b4f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.460161] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc2bfbc-d3dc-44b4-bc92-a8a5ad102e0d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.491256] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1630706-6db3-438b-8b0c-93ec6a3bba7d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.498285] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aecdeb54-15eb-4e4f-a38d-f9fbf4687634 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.511557] env[69784]: DEBUG nova.compute.provider_tree [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1037.520029] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1037.534441] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69784) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1037.534617] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.619s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1039.534014] env[69784]: DEBUG oslo_service.periodic_task [None 
req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1058.381593] env[69784]: WARNING oslo_vmware.rw_handles [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1058.381593] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1058.381593] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1058.381593] env[69784]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1058.381593] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1058.381593] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 1058.381593] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1058.381593] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1058.381593] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1058.381593] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1058.381593] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1058.381593] env[69784]: ERROR oslo_vmware.rw_handles [ 1058.382170] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/ce84afaf-ca92-4d91-a72d-92e11b6e4a8f/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1058.384044] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1058.384296] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Copying Virtual Disk [datastore1] vmware_temp/ce84afaf-ca92-4d91-a72d-92e11b6e4a8f/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] vmware_temp/ce84afaf-ca92-4d91-a72d-92e11b6e4a8f/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1058.384576] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-97680fda-0c00-4cf8-b2d0-ecb977e45251 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.391842] env[69784]: DEBUG oslo_vmware.api [None 
req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Waiting for the task: (returnval){ [ 1058.391842] env[69784]: value = "task-3467089" [ 1058.391842] env[69784]: _type = "Task" [ 1058.391842] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.399754] env[69784]: DEBUG oslo_vmware.api [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Task: {'id': task-3467089, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.903371] env[69784]: DEBUG oslo_vmware.exceptions [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Fault InvalidArgument not matched. {{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1058.903687] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1058.904259] env[69784]: ERROR nova.compute.manager [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1058.904259] env[69784]: Faults: ['InvalidArgument'] [ 1058.904259] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Traceback (most recent call last): [ 1058.904259] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1058.904259] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] yield resources [ 1058.904259] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1058.904259] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] self.driver.spawn(context, instance, image_meta, [ 1058.904259] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1058.904259] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1058.904259] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1058.904259] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] self._fetch_image_if_missing(context, vi) [ 1058.904259] env[69784]: ERROR nova.compute.manager [instance: 
c16f7920-23d4-4d77-b70f-118887cc9ff7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1058.904571] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] image_cache(vi, tmp_image_ds_loc) [ 1058.904571] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1058.904571] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] vm_util.copy_virtual_disk( [ 1058.904571] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1058.904571] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] session._wait_for_task(vmdk_copy_task) [ 1058.904571] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1058.904571] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] return self.wait_for_task(task_ref) [ 1058.904571] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1058.904571] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] return evt.wait() [ 1058.904571] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1058.904571] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] result = hub.switch() [ 1058.904571] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1058.904571] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] return self.greenlet.switch() [ 1058.904848] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1058.904848] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] self.f(*self.args, **self.kw) [ 1058.904848] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1058.904848] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] raise exceptions.translate_fault(task_info.error) [ 1058.904848] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1058.904848] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Faults: ['InvalidArgument'] [ 1058.904848] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] [ 1058.904848] env[69784]: INFO nova.compute.manager [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 
tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Terminating instance [ 1058.906101] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1058.906314] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1058.906562] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e2953905-5433-4f9d-b9fa-e6c2b2f96157 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.908773] env[69784]: DEBUG nova.compute.manager [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1058.908964] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1058.909716] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c09a1ca0-4ab8-4be4-a1f4-29d9b5385116 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.916980] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1058.917211] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ec7c74b0-d687-4d8c-803f-3841c9c0ad09 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.919566] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1058.919675] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1058.920620] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c6656c5-dc27-44b3-bc01-781c7c388eba {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.925397] env[69784]: DEBUG oslo_vmware.api [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Waiting for the task: (returnval){ [ 1058.925397] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52c1df7f-f162-5189-a61c-b91e5cc4e36f" [ 1058.925397] env[69784]: _type = "Task" [ 1058.925397] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.932368] env[69784]: DEBUG oslo_vmware.api [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52c1df7f-f162-5189-a61c-b91e5cc4e36f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.992898] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1058.993143] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1058.993334] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Deleting the datastore file [datastore1] c16f7920-23d4-4d77-b70f-118887cc9ff7 {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1058.993636] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-872e7473-1a46-4fa3-9313-52c7f689a0d2 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.999743] env[69784]: DEBUG oslo_vmware.api [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Waiting for the task: (returnval){ [ 1058.999743] env[69784]: value = "task-3467091" [ 1058.999743] env[69784]: _type = "Task" [ 1058.999743] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.007430] env[69784]: DEBUG oslo_vmware.api [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Task: {'id': task-3467091, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.435224] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1059.435487] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Creating directory with path [datastore1] vmware_temp/f7c34596-e81d-4ab4-9695-2532a53d4f76/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1059.435716] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1654a731-eaf2-4993-8ae6-c1ae85fa0073 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.447463] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Created directory with path [datastore1] vmware_temp/f7c34596-e81d-4ab4-9695-2532a53d4f76/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1059.447605] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Fetch image to [datastore1] vmware_temp/f7c34596-e81d-4ab4-9695-2532a53d4f76/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1059.447783] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/f7c34596-e81d-4ab4-9695-2532a53d4f76/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1059.448515] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a8c7698-3427-4421-beb2-261272823d63 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.454930] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37da3e9e-8f1a-4071-8146-f6ca8c424706 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.463982] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-458e5b1e-a8cc-4ad6-a64e-a4330700249a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.494928] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-74ee979b-7d8f-48b0-a0a1-28e2f17ba67a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.502992] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-0098d0f1-6e5b-49ff-9f1b-21b08117f495 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.508939] env[69784]: DEBUG oslo_vmware.api [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Task: {'id': task-3467091, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.101277} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.509184] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1059.509362] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1059.509547] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1059.509722] env[69784]: INFO nova.compute.manager [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Took 0.60 seconds to destroy the instance on the hypervisor. 
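
Annotation: the InvalidArgument/fileType fault earlier in this run is raised while polling the CopyVirtualDisk_Task that caches the sparse image. A minimal, illustrative sketch (not Nova's code; `session` and `vmdk_copy_task` are assumed to be an oslo.vmware VMwareAPISession and a task reference) of how such a fault surfaces from task polling and can be inspected:

from oslo_vmware import exceptions as vexc

def wait_for_copy(session, vmdk_copy_task):
    # wait_for_task() polls the vCenter task and re-raises its error as a
    # VimFaultException, matching the traceback recorded above.
    try:
        return session.wait_for_task(vmdk_copy_task)
    except vexc.VimFaultException as exc:
        # exc.fault_list carries the fault names (['InvalidArgument'] here);
        # str(exc) names the offending parameter ("fileType").
        if 'InvalidArgument' in (exc.fault_list or []):
            raise  # let the compute manager abort the claim and reschedule
        raise
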
[ 1059.511779] env[69784]: DEBUG nova.compute.claims [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1059.511947] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1059.512179] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1059.524588] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1059.578832] env[69784]: DEBUG oslo_vmware.rw_handles [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f7c34596-e81d-4ab4-9695-2532a53d4f76/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1059.641314] env[69784]: DEBUG oslo_vmware.rw_handles [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Completed reading data from the image iterator. {{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1059.641314] env[69784]: DEBUG oslo_vmware.rw_handles [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f7c34596-e81d-4ab4-9695-2532a53d4f76/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1059.911800] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-293660f3-3fbf-4d9e-aec2-136a89b4abcb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.919691] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af4b5ae5-6929-4383-bbc5-85cc7cd26460 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.951294] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0c6c3b8-0aca-4b82-9e86-a4bd05fd2037 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.958591] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a83f38a3-7477-4671-9059-4a230f4f2320 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.971699] env[69784]: DEBUG nova.compute.provider_tree [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1059.980289] env[69784]: DEBUG nova.scheduler.client.report [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1059.994035] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.482s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1059.994542] env[69784]: ERROR nova.compute.manager [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1059.994542] env[69784]: Faults: ['InvalidArgument'] [ 1059.994542] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Traceback (most recent call last): [ 1059.994542] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1059.994542] 
env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] self.driver.spawn(context, instance, image_meta, [ 1059.994542] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1059.994542] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1059.994542] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1059.994542] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] self._fetch_image_if_missing(context, vi) [ 1059.994542] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1059.994542] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] image_cache(vi, tmp_image_ds_loc) [ 1059.994542] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1059.994839] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] vm_util.copy_virtual_disk( [ 1059.994839] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1059.994839] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] session._wait_for_task(vmdk_copy_task) [ 1059.994839] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1059.994839] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] return self.wait_for_task(task_ref) [ 1059.994839] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1059.994839] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] return evt.wait() [ 1059.994839] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1059.994839] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] result = hub.switch() [ 1059.994839] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1059.994839] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] return self.greenlet.switch() [ 1059.994839] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1059.994839] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] self.f(*self.args, **self.kw) [ 1059.995117] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1059.995117] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] raise exceptions.translate_fault(task_info.error) [ 1059.995117] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1059.995117] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Faults: ['InvalidArgument'] [ 1059.995117] env[69784]: ERROR nova.compute.manager [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] [ 1059.995221] env[69784]: DEBUG nova.compute.utils [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1059.996518] env[69784]: DEBUG nova.compute.manager [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Build of instance c16f7920-23d4-4d77-b70f-118887cc9ff7 was re-scheduled: A specified parameter was not correct: fileType [ 1059.996518] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1059.996885] env[69784]: DEBUG nova.compute.manager [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1059.997070] env[69784]: DEBUG nova.compute.manager [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1059.997227] env[69784]: DEBUG nova.compute.manager [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1059.997394] env[69784]: DEBUG nova.network.neutron [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1060.333406] env[69784]: DEBUG nova.network.neutron [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.345192] env[69784]: INFO nova.compute.manager [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Took 0.35 seconds to deallocate network for instance. [ 1060.437448] env[69784]: INFO nova.scheduler.client.report [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Deleted allocations for instance c16f7920-23d4-4d77-b70f-118887cc9ff7 [ 1060.456425] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f197cf71-cec7-45ad-8242-ea0fa3db83fe tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Lock "c16f7920-23d4-4d77-b70f-118887cc9ff7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 387.403s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1060.457595] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f545ad18-b9b8-43a3-9d18-21b1de111328 tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Lock "c16f7920-23d4-4d77-b70f-118887cc9ff7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 188.122s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1060.457814] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f545ad18-b9b8-43a3-9d18-21b1de111328 tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Acquiring lock "c16f7920-23d4-4d77-b70f-118887cc9ff7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1060.458046] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f545ad18-b9b8-43a3-9d18-21b1de111328 tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Lock "c16f7920-23d4-4d77-b70f-118887cc9ff7-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1060.458221] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f545ad18-b9b8-43a3-9d18-21b1de111328 tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Lock "c16f7920-23d4-4d77-b70f-118887cc9ff7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1060.462849] env[69784]: INFO nova.compute.manager [None req-f545ad18-b9b8-43a3-9d18-21b1de111328 tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Terminating instance [ 1060.464545] env[69784]: DEBUG nova.compute.manager [None req-f545ad18-b9b8-43a3-9d18-21b1de111328 tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1060.464739] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-f545ad18-b9b8-43a3-9d18-21b1de111328 tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1060.464990] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-534c8b3b-1037-4088-95c5-21884f73c299 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.468901] env[69784]: DEBUG nova.compute.manager [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1060.475380] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67868dc4-06d9-45a2-a14c-509cdd130d83 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.503993] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-f545ad18-b9b8-43a3-9d18-21b1de111328 tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c16f7920-23d4-4d77-b70f-118887cc9ff7 could not be found. 
[ 1060.504275] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-f545ad18-b9b8-43a3-9d18-21b1de111328 tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1060.504493] env[69784]: INFO nova.compute.manager [None req-f545ad18-b9b8-43a3-9d18-21b1de111328 tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1060.504777] env[69784]: DEBUG oslo.service.loopingcall [None req-f545ad18-b9b8-43a3-9d18-21b1de111328 tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1060.507189] env[69784]: DEBUG nova.compute.manager [-] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1060.507332] env[69784]: DEBUG nova.network.neutron [-] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1060.521471] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1060.521713] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1060.523189] env[69784]: INFO nova.compute.claims [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1060.539179] env[69784]: DEBUG nova.network.neutron [-] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.563336] env[69784]: INFO nova.compute.manager [-] [instance: c16f7920-23d4-4d77-b70f-118887cc9ff7] Took 0.06 seconds to deallocate network for instance. 
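
Annotation: the "compute_resources" waited/held timings around the claim and abort entries come from contention on a single lock guarding the resource tracker. A small sketch of that serialization pattern using oslo.concurrency (the tracker body is a placeholder, not Nova's implementation):

from oslo_concurrency import lockutils

class ResourceTrackerSketch:
    @lockutils.synchronized('compute_resources')
    def instance_claim(self, instance_uuid, node):
        # Reserve CPU/RAM/disk for the build; placement inventory is reported separately.
        return {'instance': instance_uuid, 'node': node}

    @lockutils.synchronized('compute_resources')
    def abort_instance_claim(self, instance_uuid):
        # Release the reservation when the build fails and is rescheduled.
        pass
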
[ 1060.648757] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f545ad18-b9b8-43a3-9d18-21b1de111328 tempest-ServerExternalEventsTest-1162967698 tempest-ServerExternalEventsTest-1162967698-project-member] Lock "c16f7920-23d4-4d77-b70f-118887cc9ff7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.191s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1060.868577] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afeeed46-bcfe-4cd8-b1e1-a19551eef47f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.876157] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5430228-b495-4ba8-bd7b-85f146d653f9 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.905707] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd9d265-3b7f-4076-9caf-c26e146fe2c8 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.912794] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ea281d-f78a-4124-b50d-c4e03882cfcb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.926193] env[69784]: DEBUG nova.compute.provider_tree [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1060.934700] env[69784]: DEBUG nova.scheduler.client.report [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1060.950172] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.428s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1060.950678] env[69784]: DEBUG nova.compute.manager [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Start building networks asynchronously for instance. 
{{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1060.983040] env[69784]: DEBUG nova.compute.utils [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1060.984502] env[69784]: DEBUG nova.compute.manager [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1060.984674] env[69784]: DEBUG nova.network.neutron [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1060.993731] env[69784]: DEBUG nova.compute.manager [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1061.063884] env[69784]: DEBUG nova.policy [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2b6acb83711d429986f7643518141475', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9f96715424394efda73ca69f1a7b4a47', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 1061.067477] env[69784]: DEBUG nova.compute.manager [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Start spawning the instance on the hypervisor. 
{{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1061.094776] env[69784]: DEBUG nova.virt.hardware [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1061.094933] env[69784]: DEBUG nova.virt.hardware [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1061.095065] env[69784]: DEBUG nova.virt.hardware [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1061.095248] env[69784]: DEBUG nova.virt.hardware [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1061.096358] env[69784]: DEBUG nova.virt.hardware [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1061.096358] env[69784]: DEBUG nova.virt.hardware [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1061.096358] env[69784]: DEBUG nova.virt.hardware [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1061.096358] env[69784]: DEBUG nova.virt.hardware [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1061.096358] 
env[69784]: DEBUG nova.virt.hardware [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1061.096630] env[69784]: DEBUG nova.virt.hardware [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1061.096630] env[69784]: DEBUG nova.virt.hardware [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1061.097643] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7241802f-0e94-41ee-b4ab-5e75ae1c3847 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.106338] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-707bbd1e-d11d-4500-b839-a605c2801e9c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.462988] env[69784]: DEBUG nova.network.neutron [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Successfully created port: 4a578d79-2da8-4187-bf4e-e9d1318539db {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1062.404274] env[69784]: DEBUG nova.compute.manager [req-415bcff4-eca4-4821-9413-b7ec64f8ac23 req-8564a921-3cab-4488-8bf4-6e0a960c71de service nova] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Received event network-vif-plugged-4a578d79-2da8-4187-bf4e-e9d1318539db {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1062.404509] env[69784]: DEBUG oslo_concurrency.lockutils [req-415bcff4-eca4-4821-9413-b7ec64f8ac23 req-8564a921-3cab-4488-8bf4-6e0a960c71de service nova] Acquiring lock "288af650-a19b-4ce5-baea-013dcaa6e908-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1062.404777] env[69784]: DEBUG oslo_concurrency.lockutils [req-415bcff4-eca4-4821-9413-b7ec64f8ac23 req-8564a921-3cab-4488-8bf4-6e0a960c71de service nova] Lock "288af650-a19b-4ce5-baea-013dcaa6e908-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1062.404883] env[69784]: DEBUG oslo_concurrency.lockutils [req-415bcff4-eca4-4821-9413-b7ec64f8ac23 req-8564a921-3cab-4488-8bf4-6e0a960c71de service nova] Lock "288af650-a19b-4ce5-baea-013dcaa6e908-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 
1062.405459] env[69784]: DEBUG nova.compute.manager [req-415bcff4-eca4-4821-9413-b7ec64f8ac23 req-8564a921-3cab-4488-8bf4-6e0a960c71de service nova] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] No waiting events found dispatching network-vif-plugged-4a578d79-2da8-4187-bf4e-e9d1318539db {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1062.405693] env[69784]: WARNING nova.compute.manager [req-415bcff4-eca4-4821-9413-b7ec64f8ac23 req-8564a921-3cab-4488-8bf4-6e0a960c71de service nova] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Received unexpected event network-vif-plugged-4a578d79-2da8-4187-bf4e-e9d1318539db for instance with vm_state building and task_state spawning. [ 1062.477818] env[69784]: DEBUG oslo_concurrency.lockutils [None req-516c03ac-39b6-4cb5-9b6d-4fe4ea5bd8e1 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Acquiring lock "7a640743-734e-4dc0-a965-0a71dddfb918" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1062.511117] env[69784]: DEBUG nova.network.neutron [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Successfully updated port: 4a578d79-2da8-4187-bf4e-e9d1318539db {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1062.527746] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Acquiring lock "refresh_cache-288af650-a19b-4ce5-baea-013dcaa6e908" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1062.527904] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Acquired lock "refresh_cache-288af650-a19b-4ce5-baea-013dcaa6e908" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1062.528072] env[69784]: DEBUG nova.network.neutron [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1062.589528] env[69784]: DEBUG nova.network.neutron [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Instance cache missing network info. 
{{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1062.838750] env[69784]: DEBUG nova.network.neutron [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Updating instance_info_cache with network_info: [{"id": "4a578d79-2da8-4187-bf4e-e9d1318539db", "address": "fa:16:3e:ce:f0:0c", "network": {"id": "4d101edd-9f9a-4911-90c3-04d47086524d", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-625414710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9f96715424394efda73ca69f1a7b4a47", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a578d79-2d", "ovs_interfaceid": "4a578d79-2da8-4187-bf4e-e9d1318539db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1062.854470] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Releasing lock "refresh_cache-288af650-a19b-4ce5-baea-013dcaa6e908" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1062.854783] env[69784]: DEBUG nova.compute.manager [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Instance network_info: |[{"id": "4a578d79-2da8-4187-bf4e-e9d1318539db", "address": "fa:16:3e:ce:f0:0c", "network": {"id": "4d101edd-9f9a-4911-90c3-04d47086524d", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-625414710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9f96715424394efda73ca69f1a7b4a47", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a578d79-2d", "ovs_interfaceid": "4a578d79-2da8-4187-bf4e-e9d1318539db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1971}} [ 1062.855200] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ce:f0:0c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c6eaa481-1f92-4851-b98e-09ed0daad7cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4a578d79-2da8-4187-bf4e-e9d1318539db', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1062.863929] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Creating folder: Project (9f96715424394efda73ca69f1a7b4a47). Parent ref: group-v692547. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1062.864590] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9df2119c-3325-4b5a-825e-e45866fc89b0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.874966] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Created folder: Project (9f96715424394efda73ca69f1a7b4a47) in parent group-v692547. [ 1062.875176] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Creating folder: Instances. Parent ref: group-v692601. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1062.875405] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d39d8a4c-b9fd-412c-b77a-43958cfdada7 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.884046] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Created folder: Instances in parent group-v692601. [ 1062.884046] env[69784]: DEBUG oslo.service.loopingcall [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1062.884046] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1062.884046] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e41822b9-2218-43a1-8272-ead22e3cd44e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.902740] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1062.902740] env[69784]: value = "task-3467094" [ 1062.902740] env[69784]: _type = "Task" [ 1062.902740] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.910173] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467094, 'name': CreateVM_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.412763] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467094, 'name': CreateVM_Task} progress is 25%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.912547] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467094, 'name': CreateVM_Task} progress is 25%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.412943] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467094, 'name': CreateVM_Task} progress is 25%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.625601] env[69784]: DEBUG nova.compute.manager [req-7dade809-494b-4ead-9598-6ab8641ca5a3 req-f502b6c2-c555-48b1-8a1a-993940724f37 service nova] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Received event network-changed-4a578d79-2da8-4187-bf4e-e9d1318539db {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1064.625718] env[69784]: DEBUG nova.compute.manager [req-7dade809-494b-4ead-9598-6ab8641ca5a3 req-f502b6c2-c555-48b1-8a1a-993940724f37 service nova] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Refreshing instance network info cache due to event network-changed-4a578d79-2da8-4187-bf4e-e9d1318539db. 
{{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1064.625925] env[69784]: DEBUG oslo_concurrency.lockutils [req-7dade809-494b-4ead-9598-6ab8641ca5a3 req-f502b6c2-c555-48b1-8a1a-993940724f37 service nova] Acquiring lock "refresh_cache-288af650-a19b-4ce5-baea-013dcaa6e908" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1064.628638] env[69784]: DEBUG oslo_concurrency.lockutils [req-7dade809-494b-4ead-9598-6ab8641ca5a3 req-f502b6c2-c555-48b1-8a1a-993940724f37 service nova] Acquired lock "refresh_cache-288af650-a19b-4ce5-baea-013dcaa6e908" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1064.628638] env[69784]: DEBUG nova.network.neutron [req-7dade809-494b-4ead-9598-6ab8641ca5a3 req-f502b6c2-c555-48b1-8a1a-993940724f37 service nova] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Refreshing network info cache for port 4a578d79-2da8-4187-bf4e-e9d1318539db {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1064.914337] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467094, 'name': CreateVM_Task} progress is 25%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.166317] env[69784]: DEBUG nova.network.neutron [req-7dade809-494b-4ead-9598-6ab8641ca5a3 req-f502b6c2-c555-48b1-8a1a-993940724f37 service nova] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Updated VIF entry in instance network info cache for port 4a578d79-2da8-4187-bf4e-e9d1318539db. {{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1065.166805] env[69784]: DEBUG nova.network.neutron [req-7dade809-494b-4ead-9598-6ab8641ca5a3 req-f502b6c2-c555-48b1-8a1a-993940724f37 service nova] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Updating instance_info_cache with network_info: [{"id": "4a578d79-2da8-4187-bf4e-e9d1318539db", "address": "fa:16:3e:ce:f0:0c", "network": {"id": "4d101edd-9f9a-4911-90c3-04d47086524d", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-625414710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9f96715424394efda73ca69f1a7b4a47", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a578d79-2d", "ovs_interfaceid": "4a578d79-2da8-4187-bf4e-e9d1318539db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1065.177931] env[69784]: DEBUG oslo_concurrency.lockutils [req-7dade809-494b-4ead-9598-6ab8641ca5a3 req-f502b6c2-c555-48b1-8a1a-993940724f37 service nova] Releasing lock "refresh_cache-288af650-a19b-4ce5-baea-013dcaa6e908" {{(pid=69784) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1065.418596] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467094, 'name': CreateVM_Task, 'duration_secs': 2.350704} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.418778] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1065.422475] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1065.422475] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1065.422475] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1065.422475] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42299ab8-aea8-42c8-a31e-06c1fc120a19 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.425017] env[69784]: DEBUG oslo_vmware.api [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Waiting for the task: (returnval){ [ 1065.425017] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]5248e212-e209-616c-d27d-819b3574253d" [ 1065.425017] env[69784]: _type = "Task" [ 1065.425017] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.435804] env[69784]: DEBUG oslo_vmware.api [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]5248e212-e209-616c-d27d-819b3574253d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.940265] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1065.940265] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1065.940265] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1070.303249] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Acquiring lock "e6d05e25-386e-43d1-aec4-d62b9476891d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1070.303645] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Lock "e6d05e25-386e-43d1-aec4-d62b9476891d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1087.778056] env[69784]: DEBUG oslo_concurrency.lockutils [None req-17026cf7-0b00-4636-8056-1432b0612965 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Acquiring lock "288af650-a19b-4ce5-baea-013dcaa6e908" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1089.690998] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Acquiring lock "26793ea1-2934-4b30-8f8c-6beefe7046f7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1089.691365] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Lock "26793ea1-2934-4b30-8f8c-6beefe7046f7" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1091.839185] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1092.840454] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1092.840738] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69784) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1093.689359] env[69784]: DEBUG oslo_concurrency.lockutils [None req-3ac0a0ce-e596-4b32-b96f-3df20191f312 tempest-ServerRescueTestJSONUnderV235-56349946 tempest-ServerRescueTestJSONUnderV235-56349946-project-member] Acquiring lock "f3520c81-2ace-4113-8812-11334cc2f509" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1093.689548] env[69784]: DEBUG oslo_concurrency.lockutils [None req-3ac0a0ce-e596-4b32-b96f-3df20191f312 tempest-ServerRescueTestJSONUnderV235-56349946 tempest-ServerRescueTestJSONUnderV235-56349946-project-member] Lock "f3520c81-2ace-4113-8812-11334cc2f509" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1093.840349] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1093.840582] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1094.404233] env[69784]: DEBUG oslo_concurrency.lockutils [None req-39264a63-68c4-4e81-af74-299d20a7a5eb tempest-ServerDiagnosticsNegativeTest-1424102192 tempest-ServerDiagnosticsNegativeTest-1424102192-project-member] Acquiring lock "13fd6e03-4f8d-4e3c-a063-fd27362852e0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1094.404473] env[69784]: DEBUG oslo_concurrency.lockutils [None req-39264a63-68c4-4e81-af74-299d20a7a5eb tempest-ServerDiagnosticsNegativeTest-1424102192 tempest-ServerDiagnosticsNegativeTest-1424102192-project-member] Lock "13fd6e03-4f8d-4e3c-a063-fd27362852e0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1095.839794] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1097.841623] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1097.841623] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Starting heal instance info cache {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1097.841623] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Rebuilding the list of instances to heal {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1097.860810] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1097.860970] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1097.861118] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1097.861247] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1097.861372] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1097.861495] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1097.861617] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Skipping network cache update for instance because it is Building. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1097.861735] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1097.861896] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1097.862024] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1097.862149] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Didn't find any instances for network info cache update. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1098.302080] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c1197fe9-48b5-4f2d-8af7-f6b4bcd0c047 tempest-ImagesTestJSON-82243885 tempest-ImagesTestJSON-82243885-project-member] Acquiring lock "59b297a2-1b8e-49e5-8c7d-2e585d109f94" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1098.302358] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c1197fe9-48b5-4f2d-8af7-f6b4bcd0c047 tempest-ImagesTestJSON-82243885 tempest-ImagesTestJSON-82243885-project-member] Lock "59b297a2-1b8e-49e5-8c7d-2e585d109f94" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1098.840037] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.840037] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.851643] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1098.851958] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1098.852064] env[69784]: DEBUG oslo_concurrency.lockutils [None 
req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1098.852232] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69784) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1098.853403] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b8dcf81-887f-4a7b-a8a5-b66dc989009d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.864077] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c1aa1c-dd99-4bbd-affb-bad17c0eae36 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.877388] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50abcb04-e9df-4ac8-b742-a07fb24a95dd {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.883570] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2da683fe-9ac5-47f2-bc7e-735266d8d2a1 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.913164] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180937MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=69784) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1098.913321] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1098.913506] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1098.984281] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c43ca674-06b8-4b5d-a709-2df095b509f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1098.984460] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 0d0d1503-5522-4c0d-9096-2f25ed0fd7df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1098.984590] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 65601835-8d30-46b8-b928-b3912d058c6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1098.984713] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 9369b20b-7027-47de-8495-a503ddfb69bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1098.984833] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ecec531e-41d9-47e3-b447-bc658edaea69 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1098.984954] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c68ee659-716d-47cc-a6a1-d4c18fa5664f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1098.985085] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 16edc743-b24c-4a20-9046-f5d519bd7e9a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1098.985204] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c23e7041-ca02-4047-84d5-84b62f36b37f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1098.985321] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 7a640743-734e-4dc0-a965-0a71dddfb918 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1098.985435] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 288af650-a19b-4ce5-baea-013dcaa6e908 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1098.996569] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance f763fb3a-9db8-457a-a713-9aa2abf9e440 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1099.007216] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 6d0fb95f-194e-49ca-8992-e2cec634a5bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1099.018030] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 47224bae-e259-4517-a379-0561e3812057 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1099.027911] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 82981529-56c3-43c0-8d33-c2f2b0875bfc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1099.038115] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 6109a6f5-11ea-4983-b271-f84aa859d6cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1099.047418] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 34f889c0-3105-49d3-a2f0-9cf250ab3c4b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1099.056999] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 322c8dbc-6c6f-4343-ba39-8301c886210c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1099.068147] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance f8902bba-d976-47c2-b034-a9438b6c467b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1099.079820] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 4091d39a-80ff-43f9-8194-e995838ecb0d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1099.088662] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 098647d8-eb8e-4494-8458-857e152e0ff8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1099.098647] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 210a8b8c-13f0-4cb9-9d92-2b018291011b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1099.108058] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 4f0df17c-a95e-424c-8444-8cea31190bae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1099.117490] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 694e2a62-5f2e-475d-9356-a66651c3e5e2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1099.126728] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 5892a6e6-2829-48a0-877d-6c4307861a05 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1099.136211] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance e6d05e25-386e-43d1-aec4-d62b9476891d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1099.144983] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 26793ea1-2934-4b30-8f8c-6beefe7046f7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1099.154042] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance f3520c81-2ace-4113-8812-11334cc2f509 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1099.163347] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 13fd6e03-4f8d-4e3c-a063-fd27362852e0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1099.173043] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 59b297a2-1b8e-49e5-8c7d-2e585d109f94 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1099.173043] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1099.173043] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1099.501658] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39d23b43-c503-4b77-b2d1-69bf8ef708c2 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.510019] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-318634e1-f008-4b62-bcb5-0c0dea788cab {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.539406] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef59c97f-e182-49c6-ac01-1b808f043cfd {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.546424] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85542d42-2adb-4971-af44-c63ccbc874a0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.559125] env[69784]: DEBUG nova.compute.provider_tree [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1099.567585] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1099.582211] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69784) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1099.582334] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.669s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1100.582244] env[69784]: DEBUG oslo_service.periodic_task [None 
req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1101.835512] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.975206] env[69784]: DEBUG oslo_concurrency.lockutils [None req-91987234-59fd-4256-bbb5-c7d7bba2198d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Acquiring lock "20518e8e-51f0-4d53-827f-f0c1a57b3bc4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1102.975521] env[69784]: DEBUG oslo_concurrency.lockutils [None req-91987234-59fd-4256-bbb5-c7d7bba2198d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Lock "20518e8e-51f0-4d53-827f-f0c1a57b3bc4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1104.695368] env[69784]: DEBUG oslo_concurrency.lockutils [None req-321dba4b-ff64-4362-8433-89a4626888fb tempest-ServerShowV257Test-938327008 tempest-ServerShowV257Test-938327008-project-member] Acquiring lock "cb507720-ad76-406c-b4f1-4d08fda1804d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1104.695679] env[69784]: DEBUG oslo_concurrency.lockutils [None req-321dba4b-ff64-4362-8433-89a4626888fb tempest-ServerShowV257Test-938327008 tempest-ServerShowV257Test-938327008-project-member] Lock "cb507720-ad76-406c-b4f1-4d08fda1804d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1107.448474] env[69784]: WARNING oslo_vmware.rw_handles [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1107.448474] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1107.448474] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1107.448474] env[69784]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1107.448474] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1107.448474] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 1107.448474] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1107.448474] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1107.448474] env[69784]: ERROR 
oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1107.448474] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1107.448474] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1107.448474] env[69784]: ERROR oslo_vmware.rw_handles [ 1107.449056] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/f7c34596-e81d-4ab4-9695-2532a53d4f76/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1107.450997] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1107.451196] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Copying Virtual Disk [datastore1] vmware_temp/f7c34596-e81d-4ab4-9695-2532a53d4f76/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] vmware_temp/f7c34596-e81d-4ab4-9695-2532a53d4f76/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1107.451492] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8905640b-6241-4ab7-bc9d-5fab3d935b7b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.460244] env[69784]: DEBUG oslo_vmware.api [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Waiting for the task: (returnval){ [ 1107.460244] env[69784]: value = "task-3467095" [ 1107.460244] env[69784]: _type = "Task" [ 1107.460244] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.467817] env[69784]: DEBUG oslo_vmware.api [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Task: {'id': task-3467095, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.970477] env[69784]: DEBUG oslo_vmware.exceptions [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Fault InvalidArgument not matched. 
{{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1107.970787] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1107.971341] env[69784]: ERROR nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1107.971341] env[69784]: Faults: ['InvalidArgument'] [ 1107.971341] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Traceback (most recent call last): [ 1107.971341] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1107.971341] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] yield resources [ 1107.971341] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1107.971341] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] self.driver.spawn(context, instance, image_meta, [ 1107.971341] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1107.971341] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1107.971341] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1107.971341] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] self._fetch_image_if_missing(context, vi) [ 1107.971341] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1107.971713] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] image_cache(vi, tmp_image_ds_loc) [ 1107.971713] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1107.971713] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] vm_util.copy_virtual_disk( [ 1107.971713] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1107.971713] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] session._wait_for_task(vmdk_copy_task) [ 1107.971713] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1107.971713] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] return self.wait_for_task(task_ref) [ 1107.971713] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1107.971713] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] return evt.wait() [ 1107.971713] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1107.971713] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] result = hub.switch() [ 1107.971713] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1107.971713] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] return self.greenlet.switch() [ 1107.972117] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1107.972117] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] self.f(*self.args, **self.kw) [ 1107.972117] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1107.972117] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] raise exceptions.translate_fault(task_info.error) [ 1107.972117] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1107.972117] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Faults: ['InvalidArgument'] [ 1107.972117] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] [ 1107.972117] env[69784]: INFO nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Terminating instance [ 1107.973245] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1107.973447] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1107.974061] env[69784]: DEBUG nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a 
tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1107.974498] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1107.974498] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7aea8f24-201d-4697-9427-38876bcbb891 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.976703] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba1952f1-d878-4005-9afb-5dd727a2216a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.983775] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1107.983977] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-89dc8a2b-71fe-45d9-a341-29dc051e4047 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.986040] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1107.986218] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1107.987134] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a5b6234-b03d-49a3-82a6-449f0c2c09b0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.991556] env[69784]: DEBUG oslo_vmware.api [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Waiting for the task: (returnval){ [ 1107.991556] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52789b62-baa2-b87d-5f6e-7dad7b7942ca" [ 1107.991556] env[69784]: _type = "Task" [ 1107.991556] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.046639] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1108.046868] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1108.047065] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Deleting the datastore file [datastore1] c43ca674-06b8-4b5d-a709-2df095b509f3 {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1108.047500] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-228ce141-9dd9-4a67-b150-79385e6c7e6a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.054148] env[69784]: DEBUG oslo_vmware.api [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Waiting for the task: (returnval){ [ 1108.054148] env[69784]: value = "task-3467097" [ 1108.054148] env[69784]: _type = "Task" [ 1108.054148] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.062356] env[69784]: DEBUG oslo_vmware.api [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Task: {'id': task-3467097, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.502606] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1108.502891] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Creating directory with path [datastore1] vmware_temp/93496e5b-3519-427c-8bcf-64ba32fe739f/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1108.503162] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-30a41b11-5375-496e-a6f1-3018a470157e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.515059] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Created directory with path [datastore1] vmware_temp/93496e5b-3519-427c-8bcf-64ba32fe739f/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1108.515256] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Fetch image to [datastore1] vmware_temp/93496e5b-3519-427c-8bcf-64ba32fe739f/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1108.515419] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/93496e5b-3519-427c-8bcf-64ba32fe739f/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1108.516220] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b005f54e-7f18-4ebf-b287-299c7ce53570 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.523391] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c177907-bf20-4751-af0d-c9f79147f56a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.532705] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffa9eade-5f00-48fc-be53-d22145ba35f6 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.568019] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4d3e5fe8-aebe-4e58-a0ec-6b64c9a6eaa6 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.574877] env[69784]: DEBUG oslo_vmware.api [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Task: {'id': task-3467097, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078598} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.576349] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1108.576534] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1108.576705] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1108.576874] env[69784]: INFO nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1108.578684] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-114baa2d-0112-46ce-b2e7-456ba84911db {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.580683] env[69784]: DEBUG nova.compute.claims [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1108.580889] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1108.581133] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1108.603456] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1108.657665] env[69784]: DEBUG oslo_vmware.rw_handles [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/93496e5b-3519-427c-8bcf-64ba32fe739f/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1108.718766] env[69784]: DEBUG oslo_vmware.rw_handles [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Completed reading data from the image iterator. {{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1108.718978] env[69784]: DEBUG oslo_vmware.rw_handles [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/93496e5b-3519-427c-8bcf-64ba32fe739f/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1109.026018] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db4f2cb7-4314-4cd2-8ad5-3e69029304b7 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.033694] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfcaf5b8-27b8-4ae7-87bd-680731cbcd74 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.062655] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9696893e-6904-4b62-8abf-70dac77611ce {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.069701] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34858255-5229-4278-a80f-f922261157a7 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.082659] env[69784]: DEBUG nova.compute.provider_tree [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1109.091572] env[69784]: DEBUG nova.scheduler.client.report [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1109.105235] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.524s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1109.105765] env[69784]: ERROR nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1109.105765] env[69784]: Faults: ['InvalidArgument'] [ 1109.105765] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Traceback (most recent call last): [ 1109.105765] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance 
[ 1109.105765] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] self.driver.spawn(context, instance, image_meta, [ 1109.105765] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1109.105765] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1109.105765] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1109.105765] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] self._fetch_image_if_missing(context, vi) [ 1109.105765] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1109.105765] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] image_cache(vi, tmp_image_ds_loc) [ 1109.105765] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1109.106202] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] vm_util.copy_virtual_disk( [ 1109.106202] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1109.106202] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] session._wait_for_task(vmdk_copy_task) [ 1109.106202] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1109.106202] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] return self.wait_for_task(task_ref) [ 1109.106202] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1109.106202] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] return evt.wait() [ 1109.106202] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1109.106202] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] result = hub.switch() [ 1109.106202] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1109.106202] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] return self.greenlet.switch() [ 1109.106202] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1109.106202] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] self.f(*self.args, **self.kw) [ 1109.106718] env[69784]: ERROR nova.compute.manager [instance: 
c43ca674-06b8-4b5d-a709-2df095b509f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1109.106718] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] raise exceptions.translate_fault(task_info.error) [ 1109.106718] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1109.106718] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Faults: ['InvalidArgument'] [ 1109.106718] env[69784]: ERROR nova.compute.manager [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] [ 1109.106718] env[69784]: DEBUG nova.compute.utils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1109.107839] env[69784]: DEBUG nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Build of instance c43ca674-06b8-4b5d-a709-2df095b509f3 was re-scheduled: A specified parameter was not correct: fileType [ 1109.107839] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1109.108218] env[69784]: DEBUG nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1109.108388] env[69784]: DEBUG nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1109.108552] env[69784]: DEBUG nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1109.108710] env[69784]: DEBUG nova.network.neutron [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1109.708543] env[69784]: DEBUG nova.network.neutron [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1109.720739] env[69784]: INFO nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Took 0.61 seconds to deallocate network for instance. [ 1109.843581] env[69784]: INFO nova.scheduler.client.report [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Deleted allocations for instance c43ca674-06b8-4b5d-a709-2df095b509f3 [ 1109.869233] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Lock "c43ca674-06b8-4b5d-a709-2df095b509f3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 426.297s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1109.869233] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d8972bc0-a25a-4808-bec1-a24bc17ac0ce tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Lock "c43ca674-06b8-4b5d-a709-2df095b509f3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 224.232s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1109.869233] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d8972bc0-a25a-4808-bec1-a24bc17ac0ce tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquiring lock "c43ca674-06b8-4b5d-a709-2df095b509f3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1109.869557] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d8972bc0-a25a-4808-bec1-a24bc17ac0ce tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Lock "c43ca674-06b8-4b5d-a709-2df095b509f3-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1109.869557] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d8972bc0-a25a-4808-bec1-a24bc17ac0ce tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Lock "c43ca674-06b8-4b5d-a709-2df095b509f3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1109.870606] env[69784]: INFO nova.compute.manager [None req-d8972bc0-a25a-4808-bec1-a24bc17ac0ce tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Terminating instance [ 1109.872553] env[69784]: DEBUG nova.compute.manager [None req-d8972bc0-a25a-4808-bec1-a24bc17ac0ce tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1109.872844] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-d8972bc0-a25a-4808-bec1-a24bc17ac0ce tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1109.873627] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7b255ae8-33e4-42f5-a391-ddee93ae5660 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.880162] env[69784]: DEBUG nova.compute.manager [None req-f6225231-2c3a-4e91-882d-4fc844405a50 tempest-ImagesTestJSON-82243885 tempest-ImagesTestJSON-82243885-project-member] [instance: d48c4130-2875-4704-bbb5-75c17fd497c8] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1109.888361] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd9bda9b-b744-4288-97bc-d9b28b0f3a93 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.905662] env[69784]: DEBUG nova.compute.manager [None req-f6225231-2c3a-4e91-882d-4fc844405a50 tempest-ImagesTestJSON-82243885 tempest-ImagesTestJSON-82243885-project-member] [instance: d48c4130-2875-4704-bbb5-75c17fd497c8] Instance disappeared before build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1109.917581] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-d8972bc0-a25a-4808-bec1-a24bc17ac0ce tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c43ca674-06b8-4b5d-a709-2df095b509f3 could not be found. 
[ 1109.917780] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-d8972bc0-a25a-4808-bec1-a24bc17ac0ce tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1109.917957] env[69784]: INFO nova.compute.manager [None req-d8972bc0-a25a-4808-bec1-a24bc17ac0ce tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1109.918223] env[69784]: DEBUG oslo.service.loopingcall [None req-d8972bc0-a25a-4808-bec1-a24bc17ac0ce tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1109.918418] env[69784]: DEBUG nova.compute.manager [-] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1109.918514] env[69784]: DEBUG nova.network.neutron [-] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1109.934536] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f6225231-2c3a-4e91-882d-4fc844405a50 tempest-ImagesTestJSON-82243885 tempest-ImagesTestJSON-82243885-project-member] Lock "d48c4130-2875-4704-bbb5-75c17fd497c8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 208.512s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1109.947070] env[69784]: DEBUG nova.compute.manager [None req-c36439ae-ff2d-45e9-9293-4ecefca9d159 tempest-ServerActionsTestOtherA-926455394 tempest-ServerActionsTestOtherA-926455394-project-member] [instance: f763fb3a-9db8-457a-a713-9aa2abf9e440] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1109.963101] env[69784]: DEBUG nova.network.neutron [-] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1109.973975] env[69784]: DEBUG nova.compute.manager [None req-c36439ae-ff2d-45e9-9293-4ecefca9d159 tempest-ServerActionsTestOtherA-926455394 tempest-ServerActionsTestOtherA-926455394-project-member] [instance: f763fb3a-9db8-457a-a713-9aa2abf9e440] Instance disappeared before build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1109.976911] env[69784]: INFO nova.compute.manager [-] [instance: c43ca674-06b8-4b5d-a709-2df095b509f3] Took 0.06 seconds to deallocate network for instance. 
[ 1109.995850] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c36439ae-ff2d-45e9-9293-4ecefca9d159 tempest-ServerActionsTestOtherA-926455394 tempest-ServerActionsTestOtherA-926455394-project-member] Lock "f763fb3a-9db8-457a-a713-9aa2abf9e440" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 205.759s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1110.006871] env[69784]: DEBUG nova.compute.manager [None req-017e0973-a8ff-4a8f-b16a-801558fcc07c tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 6d0fb95f-194e-49ca-8992-e2cec634a5bf] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1110.043185] env[69784]: DEBUG nova.compute.manager [None req-017e0973-a8ff-4a8f-b16a-801558fcc07c tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 6d0fb95f-194e-49ca-8992-e2cec634a5bf] Instance disappeared before build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1110.068435] env[69784]: DEBUG oslo_concurrency.lockutils [None req-017e0973-a8ff-4a8f-b16a-801558fcc07c tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Lock "6d0fb95f-194e-49ca-8992-e2cec634a5bf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.361s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1110.073019] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d8972bc0-a25a-4808-bec1-a24bc17ac0ce tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Lock "c43ca674-06b8-4b5d-a709-2df095b509f3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.205s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1110.077900] env[69784]: DEBUG nova.compute.manager [None req-441143a3-1bc9-4dff-9df1-fcbb86f4a568 tempest-ServersTestManualDisk-160721202 tempest-ServersTestManualDisk-160721202-project-member] [instance: 47224bae-e259-4517-a379-0561e3812057] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1110.108126] env[69784]: DEBUG nova.compute.manager [None req-441143a3-1bc9-4dff-9df1-fcbb86f4a568 tempest-ServersTestManualDisk-160721202 tempest-ServersTestManualDisk-160721202-project-member] [instance: 47224bae-e259-4517-a379-0561e3812057] Instance disappeared before build. 
{{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1110.131419] env[69784]: DEBUG oslo_concurrency.lockutils [None req-441143a3-1bc9-4dff-9df1-fcbb86f4a568 tempest-ServersTestManualDisk-160721202 tempest-ServersTestManualDisk-160721202-project-member] Lock "47224bae-e259-4517-a379-0561e3812057" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 197.893s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1110.142513] env[69784]: DEBUG nova.compute.manager [None req-b9608b91-f8da-424c-a473-33914a56d794 tempest-ServersTestJSON-328276705 tempest-ServersTestJSON-328276705-project-member] [instance: 82981529-56c3-43c0-8d33-c2f2b0875bfc] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1110.193823] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9608b91-f8da-424c-a473-33914a56d794 tempest-ServersTestJSON-328276705 tempest-ServersTestJSON-328276705-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1110.194084] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9608b91-f8da-424c-a473-33914a56d794 tempest-ServersTestJSON-328276705 tempest-ServersTestJSON-328276705-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1110.195706] env[69784]: INFO nova.compute.claims [None req-b9608b91-f8da-424c-a473-33914a56d794 tempest-ServersTestJSON-328276705 tempest-ServersTestJSON-328276705-project-member] [instance: 82981529-56c3-43c0-8d33-c2f2b0875bfc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1110.644610] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41343cd5-e997-4ae6-9efd-07999e95cede {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.652659] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edbea644-3cac-4230-a94e-d2f8ed52d431 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.692649] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed497622-820b-4658-850c-339040a880b5 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.699870] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1a21507-48fb-4cd4-8c9b-5102f5cdd35a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.713677] env[69784]: DEBUG nova.compute.provider_tree [None req-b9608b91-f8da-424c-a473-33914a56d794 tempest-ServersTestJSON-328276705 tempest-ServersTestJSON-328276705-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1110.722635] env[69784]: DEBUG nova.scheduler.client.report [None 
req-b9608b91-f8da-424c-a473-33914a56d794 tempest-ServersTestJSON-328276705 tempest-ServersTestJSON-328276705-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1110.736247] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9608b91-f8da-424c-a473-33914a56d794 tempest-ServersTestJSON-328276705 tempest-ServersTestJSON-328276705-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.542s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1110.736787] env[69784]: DEBUG nova.compute.manager [None req-b9608b91-f8da-424c-a473-33914a56d794 tempest-ServersTestJSON-328276705 tempest-ServersTestJSON-328276705-project-member] [instance: 82981529-56c3-43c0-8d33-c2f2b0875bfc] Start building networks asynchronously for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1110.769558] env[69784]: DEBUG nova.compute.claims [None req-b9608b91-f8da-424c-a473-33914a56d794 tempest-ServersTestJSON-328276705 tempest-ServersTestJSON-328276705-project-member] [instance: 82981529-56c3-43c0-8d33-c2f2b0875bfc] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1110.770172] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9608b91-f8da-424c-a473-33914a56d794 tempest-ServersTestJSON-328276705 tempest-ServersTestJSON-328276705-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1110.772037] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9608b91-f8da-424c-a473-33914a56d794 tempest-ServersTestJSON-328276705 tempest-ServersTestJSON-328276705-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.001s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1110.796870] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9608b91-f8da-424c-a473-33914a56d794 tempest-ServersTestJSON-328276705 tempest-ServersTestJSON-328276705-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.026s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1110.798287] env[69784]: DEBUG nova.compute.utils [None req-b9608b91-f8da-424c-a473-33914a56d794 tempest-ServersTestJSON-328276705 tempest-ServersTestJSON-328276705-project-member] [instance: 82981529-56c3-43c0-8d33-c2f2b0875bfc] Instance 82981529-56c3-43c0-8d33-c2f2b0875bfc could not be found. 
{{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1110.800062] env[69784]: DEBUG nova.compute.manager [None req-b9608b91-f8da-424c-a473-33914a56d794 tempest-ServersTestJSON-328276705 tempest-ServersTestJSON-328276705-project-member] [instance: 82981529-56c3-43c0-8d33-c2f2b0875bfc] Instance disappeared during build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1110.800531] env[69784]: DEBUG nova.compute.manager [None req-b9608b91-f8da-424c-a473-33914a56d794 tempest-ServersTestJSON-328276705 tempest-ServersTestJSON-328276705-project-member] [instance: 82981529-56c3-43c0-8d33-c2f2b0875bfc] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1110.800977] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9608b91-f8da-424c-a473-33914a56d794 tempest-ServersTestJSON-328276705 tempest-ServersTestJSON-328276705-project-member] Acquiring lock "refresh_cache-82981529-56c3-43c0-8d33-c2f2b0875bfc" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1110.801213] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9608b91-f8da-424c-a473-33914a56d794 tempest-ServersTestJSON-328276705 tempest-ServersTestJSON-328276705-project-member] Acquired lock "refresh_cache-82981529-56c3-43c0-8d33-c2f2b0875bfc" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1110.801458] env[69784]: DEBUG nova.network.neutron [None req-b9608b91-f8da-424c-a473-33914a56d794 tempest-ServersTestJSON-328276705 tempest-ServersTestJSON-328276705-project-member] [instance: 82981529-56c3-43c0-8d33-c2f2b0875bfc] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1110.809996] env[69784]: DEBUG nova.compute.utils [None req-b9608b91-f8da-424c-a473-33914a56d794 tempest-ServersTestJSON-328276705 tempest-ServersTestJSON-328276705-project-member] [instance: 82981529-56c3-43c0-8d33-c2f2b0875bfc] Can not refresh info_cache because instance was not found {{(pid=69784) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1010}} [ 1110.839468] env[69784]: DEBUG nova.network.neutron [None req-b9608b91-f8da-424c-a473-33914a56d794 tempest-ServersTestJSON-328276705 tempest-ServersTestJSON-328276705-project-member] [instance: 82981529-56c3-43c0-8d33-c2f2b0875bfc] Instance cache missing network info. 
{{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1111.288376] env[69784]: DEBUG nova.network.neutron [None req-b9608b91-f8da-424c-a473-33914a56d794 tempest-ServersTestJSON-328276705 tempest-ServersTestJSON-328276705-project-member] [instance: 82981529-56c3-43c0-8d33-c2f2b0875bfc] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1111.304070] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9608b91-f8da-424c-a473-33914a56d794 tempest-ServersTestJSON-328276705 tempest-ServersTestJSON-328276705-project-member] Releasing lock "refresh_cache-82981529-56c3-43c0-8d33-c2f2b0875bfc" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1111.304070] env[69784]: DEBUG nova.compute.manager [None req-b9608b91-f8da-424c-a473-33914a56d794 tempest-ServersTestJSON-328276705 tempest-ServersTestJSON-328276705-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1111.304070] env[69784]: DEBUG nova.compute.manager [None req-b9608b91-f8da-424c-a473-33914a56d794 tempest-ServersTestJSON-328276705 tempest-ServersTestJSON-328276705-project-member] [instance: 82981529-56c3-43c0-8d33-c2f2b0875bfc] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1111.304070] env[69784]: DEBUG nova.network.neutron [None req-b9608b91-f8da-424c-a473-33914a56d794 tempest-ServersTestJSON-328276705 tempest-ServersTestJSON-328276705-project-member] [instance: 82981529-56c3-43c0-8d33-c2f2b0875bfc] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1111.328024] env[69784]: DEBUG nova.network.neutron [None req-b9608b91-f8da-424c-a473-33914a56d794 tempest-ServersTestJSON-328276705 tempest-ServersTestJSON-328276705-project-member] [instance: 82981529-56c3-43c0-8d33-c2f2b0875bfc] Instance cache missing network info. {{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1111.335642] env[69784]: DEBUG nova.network.neutron [None req-b9608b91-f8da-424c-a473-33914a56d794 tempest-ServersTestJSON-328276705 tempest-ServersTestJSON-328276705-project-member] [instance: 82981529-56c3-43c0-8d33-c2f2b0875bfc] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1111.349470] env[69784]: INFO nova.compute.manager [None req-b9608b91-f8da-424c-a473-33914a56d794 tempest-ServersTestJSON-328276705 tempest-ServersTestJSON-328276705-project-member] [instance: 82981529-56c3-43c0-8d33-c2f2b0875bfc] Took 0.05 seconds to deallocate network for instance. 
[ 1111.403262] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9608b91-f8da-424c-a473-33914a56d794 tempest-ServersTestJSON-328276705 tempest-ServersTestJSON-328276705-project-member] Lock "82981529-56c3-43c0-8d33-c2f2b0875bfc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.569s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1111.412397] env[69784]: DEBUG nova.compute.manager [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1111.482439] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1111.482485] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1111.484516] env[69784]: INFO nova.compute.claims [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1111.943788] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-022ccedb-542a-4a60-87f8-821c42bf7f4d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.953517] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7820519b-3b88-4490-b546-d1b64a3614b0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.990515] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0762206-95eb-4a08-ba48-ebd0f385ca21 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.999741] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5153aa5-4eee-4eb6-9cb3-31654cad7703 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.012696] env[69784]: DEBUG nova.compute.provider_tree [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1112.027992] env[69784]: DEBUG nova.scheduler.client.report [None 
req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1112.045138] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.561s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1112.045138] env[69784]: DEBUG nova.compute.manager [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Start building networks asynchronously for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1112.085357] env[69784]: DEBUG nova.compute.utils [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1112.087816] env[69784]: DEBUG nova.compute.manager [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1112.088948] env[69784]: DEBUG nova.network.neutron [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1112.098818] env[69784]: DEBUG nova.compute.manager [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1112.183788] env[69784]: DEBUG nova.compute.manager [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Start spawning the instance on the hypervisor. 
{{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1112.215694] env[69784]: DEBUG nova.virt.hardware [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1112.215943] env[69784]: DEBUG nova.virt.hardware [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1112.216111] env[69784]: DEBUG nova.virt.hardware [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1112.216296] env[69784]: DEBUG nova.virt.hardware [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1112.216946] env[69784]: DEBUG nova.virt.hardware [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1112.216946] env[69784]: DEBUG nova.virt.hardware [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1112.216946] env[69784]: DEBUG nova.virt.hardware [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1112.216946] env[69784]: DEBUG nova.virt.hardware [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1112.217132] env[69784]: DEBUG nova.virt.hardware [None 
req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1112.217273] env[69784]: DEBUG nova.virt.hardware [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1112.217450] env[69784]: DEBUG nova.virt.hardware [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1112.218308] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-122ee256-64d6-461e-91e8-63c463a16282 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.230776] env[69784]: DEBUG nova.policy [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9377793b609045d39998428a593965d5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fe0e5b71c37c4d85802b252a5869eac9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 1112.233357] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a74c308-d480-462c-a4a6-7dfed030f5a8 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.144446] env[69784]: DEBUG nova.network.neutron [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Successfully created port: 07d13b83-b2f6-44f9-8b90-1886738243bf {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1114.147175] env[69784]: DEBUG nova.compute.manager [req-e8966bd2-be1e-4600-9fe9-6a44590ae3ae req-e5af7d26-3026-4222-831b-b7de87c28866 service nova] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Received event network-vif-plugged-07d13b83-b2f6-44f9-8b90-1886738243bf {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1114.147175] env[69784]: DEBUG oslo_concurrency.lockutils [req-e8966bd2-be1e-4600-9fe9-6a44590ae3ae req-e5af7d26-3026-4222-831b-b7de87c28866 service nova] Acquiring lock "6109a6f5-11ea-4983-b271-f84aa859d6cd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1114.147175] env[69784]: DEBUG oslo_concurrency.lockutils [req-e8966bd2-be1e-4600-9fe9-6a44590ae3ae req-e5af7d26-3026-4222-831b-b7de87c28866 service nova] 
Lock "6109a6f5-11ea-4983-b271-f84aa859d6cd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1114.147175] env[69784]: DEBUG oslo_concurrency.lockutils [req-e8966bd2-be1e-4600-9fe9-6a44590ae3ae req-e5af7d26-3026-4222-831b-b7de87c28866 service nova] Lock "6109a6f5-11ea-4983-b271-f84aa859d6cd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1114.147567] env[69784]: DEBUG nova.compute.manager [req-e8966bd2-be1e-4600-9fe9-6a44590ae3ae req-e5af7d26-3026-4222-831b-b7de87c28866 service nova] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] No waiting events found dispatching network-vif-plugged-07d13b83-b2f6-44f9-8b90-1886738243bf {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1114.147898] env[69784]: WARNING nova.compute.manager [req-e8966bd2-be1e-4600-9fe9-6a44590ae3ae req-e5af7d26-3026-4222-831b-b7de87c28866 service nova] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Received unexpected event network-vif-plugged-07d13b83-b2f6-44f9-8b90-1886738243bf for instance with vm_state building and task_state spawning. [ 1114.242152] env[69784]: DEBUG nova.network.neutron [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Successfully updated port: 07d13b83-b2f6-44f9-8b90-1886738243bf {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1114.256740] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Acquiring lock "refresh_cache-6109a6f5-11ea-4983-b271-f84aa859d6cd" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1114.256900] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Acquired lock "refresh_cache-6109a6f5-11ea-4983-b271-f84aa859d6cd" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1114.257072] env[69784]: DEBUG nova.network.neutron [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1114.297908] env[69784]: DEBUG nova.network.neutron [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Instance cache missing network info. 
{{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1114.521512] env[69784]: DEBUG nova.network.neutron [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Updating instance_info_cache with network_info: [{"id": "07d13b83-b2f6-44f9-8b90-1886738243bf", "address": "fa:16:3e:70:27:d5", "network": {"id": "9eacbbe0-ac38-4a98-83ea-b8b281d96fed", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-768542737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fe0e5b71c37c4d85802b252a5869eac9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea6e81c3-94aa-40a6-a4d4-7f338b503442", "external-id": "nsx-vlan-transportzone-637", "segmentation_id": 637, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07d13b83-b2", "ovs_interfaceid": "07d13b83-b2f6-44f9-8b90-1886738243bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1114.543758] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Releasing lock "refresh_cache-6109a6f5-11ea-4983-b271-f84aa859d6cd" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1114.544272] env[69784]: DEBUG nova.compute.manager [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Instance network_info: |[{"id": "07d13b83-b2f6-44f9-8b90-1886738243bf", "address": "fa:16:3e:70:27:d5", "network": {"id": "9eacbbe0-ac38-4a98-83ea-b8b281d96fed", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-768542737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fe0e5b71c37c4d85802b252a5869eac9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea6e81c3-94aa-40a6-a4d4-7f338b503442", "external-id": "nsx-vlan-transportzone-637", "segmentation_id": 637, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07d13b83-b2", "ovs_interfaceid": "07d13b83-b2f6-44f9-8b90-1886738243bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1971}} [ 1114.545108] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:70:27:d5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea6e81c3-94aa-40a6-a4d4-7f338b503442', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '07d13b83-b2f6-44f9-8b90-1886738243bf', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1114.553443] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Creating folder: Project (fe0e5b71c37c4d85802b252a5869eac9). Parent ref: group-v692547. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1114.554623] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b7c0359b-abbc-4030-8a77-0125b266ebcb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.567040] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Created folder: Project (fe0e5b71c37c4d85802b252a5869eac9) in parent group-v692547. [ 1114.567315] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Creating folder: Instances. Parent ref: group-v692604. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1114.567818] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dfd017c4-e7f2-4bfe-b8c5-58b9fe7e63c1 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.576348] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Created folder: Instances in parent group-v692604. [ 1114.576574] env[69784]: DEBUG oslo.service.loopingcall [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1114.576755] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1114.576939] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1d8b7e3c-e3d2-44ff-9d6e-9c6f4f17f66e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.596071] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1114.596071] env[69784]: value = "task-3467100" [ 1114.596071] env[69784]: _type = "Task" [ 1114.596071] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.604793] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467100, 'name': CreateVM_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.106511] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467100, 'name': CreateVM_Task, 'duration_secs': 0.332926} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.106794] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1115.107402] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1115.107568] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1115.107906] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1115.108176] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8592cf0f-37db-4fc7-b670-ac368fab2ccc {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.113593] env[69784]: DEBUG oslo_vmware.api [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Waiting for the task: (returnval){ [ 1115.113593] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52be99b3-42d8-de18-e4fd-0250b701c748" [ 1115.113593] env[69784]: _type = "Task" [ 1115.113593] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.121199] env[69784]: DEBUG oslo_vmware.api [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52be99b3-42d8-de18-e4fd-0250b701c748, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.623346] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1115.623624] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1115.623799] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1116.187051] env[69784]: DEBUG nova.compute.manager [req-8a9da02b-dc79-4e51-925b-b1624389b3c8 req-04233aa2-892f-4237-a7f9-03e2f0aafcde service nova] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Received event network-changed-07d13b83-b2f6-44f9-8b90-1886738243bf {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1116.187270] env[69784]: DEBUG nova.compute.manager [req-8a9da02b-dc79-4e51-925b-b1624389b3c8 req-04233aa2-892f-4237-a7f9-03e2f0aafcde service nova] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Refreshing instance network info cache due to event network-changed-07d13b83-b2f6-44f9-8b90-1886738243bf. {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1116.187484] env[69784]: DEBUG oslo_concurrency.lockutils [req-8a9da02b-dc79-4e51-925b-b1624389b3c8 req-04233aa2-892f-4237-a7f9-03e2f0aafcde service nova] Acquiring lock "refresh_cache-6109a6f5-11ea-4983-b271-f84aa859d6cd" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1116.187628] env[69784]: DEBUG oslo_concurrency.lockutils [req-8a9da02b-dc79-4e51-925b-b1624389b3c8 req-04233aa2-892f-4237-a7f9-03e2f0aafcde service nova] Acquired lock "refresh_cache-6109a6f5-11ea-4983-b271-f84aa859d6cd" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1116.187790] env[69784]: DEBUG nova.network.neutron [req-8a9da02b-dc79-4e51-925b-b1624389b3c8 req-04233aa2-892f-4237-a7f9-03e2f0aafcde service nova] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Refreshing network info cache for port 07d13b83-b2f6-44f9-8b90-1886738243bf {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1116.462090] env[69784]: DEBUG nova.network.neutron [req-8a9da02b-dc79-4e51-925b-b1624389b3c8 req-04233aa2-892f-4237-a7f9-03e2f0aafcde service nova] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Updated VIF entry in instance network info cache for port 07d13b83-b2f6-44f9-8b90-1886738243bf. 
{{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1116.462502] env[69784]: DEBUG nova.network.neutron [req-8a9da02b-dc79-4e51-925b-b1624389b3c8 req-04233aa2-892f-4237-a7f9-03e2f0aafcde service nova] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Updating instance_info_cache with network_info: [{"id": "07d13b83-b2f6-44f9-8b90-1886738243bf", "address": "fa:16:3e:70:27:d5", "network": {"id": "9eacbbe0-ac38-4a98-83ea-b8b281d96fed", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-768542737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fe0e5b71c37c4d85802b252a5869eac9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea6e81c3-94aa-40a6-a4d4-7f338b503442", "external-id": "nsx-vlan-transportzone-637", "segmentation_id": 637, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07d13b83-b2", "ovs_interfaceid": "07d13b83-b2f6-44f9-8b90-1886738243bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.471694] env[69784]: DEBUG oslo_concurrency.lockutils [req-8a9da02b-dc79-4e51-925b-b1624389b3c8 req-04233aa2-892f-4237-a7f9-03e2f0aafcde service nova] Releasing lock "refresh_cache-6109a6f5-11ea-4983-b271-f84aa859d6cd" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1121.282487] env[69784]: DEBUG oslo_concurrency.lockutils [None req-33c80ab5-9ef5-4ef3-8639-c003d6db0f68 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Acquiring lock "6109a6f5-11ea-4983-b271-f84aa859d6cd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1131.728183] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Acquiring lock "91546cc5-6e8b-4175-b256-ba19e98c22cc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1131.728183] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Lock "91546cc5-6e8b-4175-b256-ba19e98c22cc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1133.273706] env[69784]: DEBUG oslo_concurrency.lockutils [None req-56d397f7-2662-4b81-bfbf-4a7322d2e921 tempest-SecurityGroupsTestJSON-1889373630 tempest-SecurityGroupsTestJSON-1889373630-project-member] Acquiring lock 
"9b208c32-b233-4312-902a-3e4be0ddb23b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1133.274336] env[69784]: DEBUG oslo_concurrency.lockutils [None req-56d397f7-2662-4b81-bfbf-4a7322d2e921 tempest-SecurityGroupsTestJSON-1889373630 tempest-SecurityGroupsTestJSON-1889373630-project-member] Lock "9b208c32-b233-4312-902a-3e4be0ddb23b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1135.975251] env[69784]: DEBUG oslo_concurrency.lockutils [None req-83927e31-b126-4eb6-a686-b6c4d2b9b837 tempest-AttachInterfacesTestJSON-1468351673 tempest-AttachInterfacesTestJSON-1468351673-project-member] Acquiring lock "2d4c834f-d9da-497f-8863-28c30e11c113" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1135.975534] env[69784]: DEBUG oslo_concurrency.lockutils [None req-83927e31-b126-4eb6-a686-b6c4d2b9b837 tempest-AttachInterfacesTestJSON-1468351673 tempest-AttachInterfacesTestJSON-1468351673-project-member] Lock "2d4c834f-d9da-497f-8863-28c30e11c113" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1139.254666] env[69784]: DEBUG oslo_concurrency.lockutils [None req-582556ac-5ebd-4902-969a-d3c2cc4a87a2 tempest-ServerActionsTestJSON-1763851275 tempest-ServerActionsTestJSON-1763851275-project-member] Acquiring lock "d165cf46-31ba-40e3-b7e9-fad5f05242ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1139.254958] env[69784]: DEBUG oslo_concurrency.lockutils [None req-582556ac-5ebd-4902-969a-d3c2cc4a87a2 tempest-ServerActionsTestJSON-1763851275 tempest-ServerActionsTestJSON-1763851275-project-member] Lock "d165cf46-31ba-40e3-b7e9-fad5f05242ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1146.272825] env[69784]: DEBUG oslo_concurrency.lockutils [None req-888ef8bb-c597-455d-a70a-8aafca51dff7 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Acquiring lock "8c4c0989-5269-4b6a-9b5a-778803657608" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1146.273216] env[69784]: DEBUG oslo_concurrency.lockutils [None req-888ef8bb-c597-455d-a70a-8aafca51dff7 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Lock "8c4c0989-5269-4b6a-9b5a-778803657608" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 
1152.840316] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1153.839818] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1153.840064] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69784) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1154.841029] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1155.840242] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1156.840060] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1157.465542] env[69784]: WARNING oslo_vmware.rw_handles [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1157.465542] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1157.465542] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1157.465542] env[69784]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1157.465542] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1157.465542] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 1157.465542] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1157.465542] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1157.465542] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1157.465542] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1157.465542] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1157.465542] env[69784]: ERROR oslo_vmware.rw_handles [ 1157.466047] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Downloaded image file data 
a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/93496e5b-3519-427c-8bcf-64ba32fe739f/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1157.467778] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1157.468023] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Copying Virtual Disk [datastore1] vmware_temp/93496e5b-3519-427c-8bcf-64ba32fe739f/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] vmware_temp/93496e5b-3519-427c-8bcf-64ba32fe739f/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1157.468315] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bef7ccfa-4f37-4f0e-a8bc-2eac11048d65 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.477202] env[69784]: DEBUG oslo_vmware.api [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Waiting for the task: (returnval){ [ 1157.477202] env[69784]: value = "task-3467101" [ 1157.477202] env[69784]: _type = "Task" [ 1157.477202] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.485259] env[69784]: DEBUG oslo_vmware.api [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Task: {'id': task-3467101, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.987692] env[69784]: DEBUG oslo_vmware.exceptions [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Fault InvalidArgument not matched. 
{{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1157.987990] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1157.988555] env[69784]: ERROR nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1157.988555] env[69784]: Faults: ['InvalidArgument'] [ 1157.988555] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Traceback (most recent call last): [ 1157.988555] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1157.988555] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] yield resources [ 1157.988555] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1157.988555] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] self.driver.spawn(context, instance, image_meta, [ 1157.988555] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1157.988555] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1157.988555] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1157.988555] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] self._fetch_image_if_missing(context, vi) [ 1157.988555] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1157.988937] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] image_cache(vi, tmp_image_ds_loc) [ 1157.988937] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1157.988937] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] vm_util.copy_virtual_disk( [ 1157.988937] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1157.988937] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] session._wait_for_task(vmdk_copy_task) [ 1157.988937] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1157.988937] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] return self.wait_for_task(task_ref) [ 1157.988937] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1157.988937] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] return evt.wait() [ 1157.988937] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1157.988937] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] result = hub.switch() [ 1157.988937] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1157.988937] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] return self.greenlet.switch() [ 1157.989317] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1157.989317] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] self.f(*self.args, **self.kw) [ 1157.989317] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1157.989317] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] raise exceptions.translate_fault(task_info.error) [ 1157.989317] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1157.989317] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Faults: ['InvalidArgument'] [ 1157.989317] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] [ 1157.989317] env[69784]: INFO nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Terminating instance [ 1157.990424] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1157.990639] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1157.990883] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-f163bbbd-dfec-4aa7-bf44-a7bc21d5ba2e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.993253] env[69784]: DEBUG nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1157.993451] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1157.994170] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8172e184-bf9e-435c-b439-6bb3cb1c5126 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.001050] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1158.001310] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4c1ed462-448e-4cf0-a5f0-cb03a2b8b73b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.003389] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1158.003557] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1158.004485] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6bb55829-6236-4cc4-b67c-7a99906f5560 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.009078] env[69784]: DEBUG oslo_vmware.api [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Waiting for the task: (returnval){ [ 1158.009078] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]5294a6b4-a926-c045-bc4e-1cf7f3ddf439" [ 1158.009078] env[69784]: _type = "Task" [ 1158.009078] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.015684] env[69784]: DEBUG oslo_vmware.api [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]5294a6b4-a926-c045-bc4e-1cf7f3ddf439, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.137892] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1158.138138] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1158.138318] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Deleting the datastore file [datastore1] 0d0d1503-5522-4c0d-9096-2f25ed0fd7df {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1158.138589] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e0ba4a76-511d-4856-8cb1-2f4d0ad1bc5d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.145199] env[69784]: DEBUG oslo_vmware.api [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Waiting for the task: (returnval){ [ 1158.145199] env[69784]: value = "task-3467103" [ 1158.145199] env[69784]: _type = "Task" [ 1158.145199] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.153551] env[69784]: DEBUG oslo_vmware.api [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Task: {'id': task-3467103, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.520121] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1158.520394] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Creating directory with path [datastore1] vmware_temp/ff71e4fa-f083-4c42-990c-484ac21aeeab/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1158.520706] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d381c88c-4380-4400-b92d-9ccdc67a26e1 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.533041] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Created directory with path [datastore1] vmware_temp/ff71e4fa-f083-4c42-990c-484ac21aeeab/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1158.533041] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Fetch image to [datastore1] vmware_temp/ff71e4fa-f083-4c42-990c-484ac21aeeab/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1158.533041] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/ff71e4fa-f083-4c42-990c-484ac21aeeab/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1158.533343] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5cb45e5-99c2-41d1-ad17-6e1596786e4a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.541116] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ac0d39e-7472-4a42-b99f-5ecee44259e1 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.549893] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db23760b-e597-404c-b3a8-fdab69505d3a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.581097] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5b3de532-f239-49c9-bfad-782457fd0146 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.586987] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9372f410-9b4b-4f84-ac02-22d18168936c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.606795] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1158.653555] env[69784]: DEBUG oslo_vmware.api [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Task: {'id': task-3467103, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072645} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.653833] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1158.654038] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1158.654218] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1158.654419] env[69784]: INFO nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Took 0.66 seconds to destroy the instance on the hypervisor. 
[ 1158.656416] env[69784]: DEBUG nova.compute.claims [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1158.656590] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1158.656804] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1158.659974] env[69784]: DEBUG oslo_vmware.rw_handles [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ff71e4fa-f083-4c42-990c-484ac21aeeab/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1158.720902] env[69784]: DEBUG oslo_vmware.rw_handles [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Completed reading data from the image iterator. {{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1158.721111] env[69784]: DEBUG oslo_vmware.rw_handles [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ff71e4fa-f083-4c42-990c-484ac21aeeab/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1158.840720] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1158.853699] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1158.984148] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8349bd4d-b15d-4d06-9e9f-33d54c8c1271 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.993214] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-767ed067-f3aa-4269-a238-d3e360e14c8c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.022998] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a6b1aca-5da9-497a-ab34-6770f1977b59 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.031058] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-677ecddb-e07f-4f48-afa8-412f445ec2e6 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.044475] env[69784]: DEBUG nova.compute.provider_tree [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1159.054807] env[69784]: DEBUG nova.scheduler.client.report [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1159.087503] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.431s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1159.088046] env[69784]: ERROR nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a 
tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1159.088046] env[69784]: Faults: ['InvalidArgument'] [ 1159.088046] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Traceback (most recent call last): [ 1159.088046] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1159.088046] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] self.driver.spawn(context, instance, image_meta, [ 1159.088046] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1159.088046] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1159.088046] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1159.088046] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] self._fetch_image_if_missing(context, vi) [ 1159.088046] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1159.088046] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] image_cache(vi, tmp_image_ds_loc) [ 1159.088046] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1159.088425] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] vm_util.copy_virtual_disk( [ 1159.088425] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1159.088425] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] session._wait_for_task(vmdk_copy_task) [ 1159.088425] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1159.088425] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] return self.wait_for_task(task_ref) [ 1159.088425] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1159.088425] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] return evt.wait() [ 1159.088425] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1159.088425] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] result = hub.switch() [ 1159.088425] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1159.088425] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] return self.greenlet.switch() [ 1159.088425] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1159.088425] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] self.f(*self.args, **self.kw) [ 1159.088780] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1159.088780] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] raise exceptions.translate_fault(task_info.error) [ 1159.088780] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1159.088780] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Faults: ['InvalidArgument'] [ 1159.088780] env[69784]: ERROR nova.compute.manager [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] [ 1159.088780] env[69784]: DEBUG nova.compute.utils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1159.089804] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.236s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1159.089991] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1159.090173] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69784) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1159.090846] env[69784]: DEBUG nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Build of instance 0d0d1503-5522-4c0d-9096-2f25ed0fd7df was re-scheduled: A specified parameter was not correct: fileType [ 1159.090846] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1159.091232] env[69784]: DEBUG nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Unplugging VIFs for instance 
{{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1159.091403] env[69784]: DEBUG nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1159.091569] env[69784]: DEBUG nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1159.091730] env[69784]: DEBUG nova.network.neutron [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1159.093993] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-198d768f-4256-4632-a2c7-f1444721b705 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.102746] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45b401cf-c5cb-4259-92f6-8bb99afc6068 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.116841] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05d34524-6f9f-4ec6-b7be-3154fde8811d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.123058] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-104f51f6-cb87-4acc-8abf-03625191ebe9 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.153517] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180957MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=69784) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1159.153517] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1159.153517] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1159.232591] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 
0d0d1503-5522-4c0d-9096-2f25ed0fd7df has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1159.233494] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 65601835-8d30-46b8-b928-b3912d058c6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1159.233494] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 9369b20b-7027-47de-8495-a503ddfb69bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1159.233494] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ecec531e-41d9-47e3-b447-bc658edaea69 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1159.233494] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c68ee659-716d-47cc-a6a1-d4c18fa5664f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1159.233814] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 16edc743-b24c-4a20-9046-f5d519bd7e9a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1159.233814] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c23e7041-ca02-4047-84d5-84b62f36b37f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1159.233814] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 7a640743-734e-4dc0-a965-0a71dddfb918 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1159.233814] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 288af650-a19b-4ce5-baea-013dcaa6e908 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1159.233931] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 6109a6f5-11ea-4983-b271-f84aa859d6cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1159.245239] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 694e2a62-5f2e-475d-9356-a66651c3e5e2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1159.254955] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 5892a6e6-2829-48a0-877d-6c4307861a05 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1159.264751] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance e6d05e25-386e-43d1-aec4-d62b9476891d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1159.275509] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 26793ea1-2934-4b30-8f8c-6beefe7046f7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1159.286576] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance f3520c81-2ace-4113-8812-11334cc2f509 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1159.296520] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 13fd6e03-4f8d-4e3c-a063-fd27362852e0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1159.309692] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 59b297a2-1b8e-49e5-8c7d-2e585d109f94 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1159.329764] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 20518e8e-51f0-4d53-827f-f0c1a57b3bc4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1159.347911] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance cb507720-ad76-406c-b4f1-4d08fda1804d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1159.370316] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 91546cc5-6e8b-4175-b256-ba19e98c22cc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1159.380820] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 9b208c32-b233-4312-902a-3e4be0ddb23b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1159.393239] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 2d4c834f-d9da-497f-8863-28c30e11c113 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1159.403361] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance d165cf46-31ba-40e3-b7e9-fad5f05242ca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1159.413692] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 8c4c0989-5269-4b6a-9b5a-778803657608 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1159.413958] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1159.414123] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1159.488871] env[69784]: DEBUG nova.network.neutron [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1159.500374] env[69784]: INFO nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Took 0.41 seconds to deallocate network for instance. 
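Editor's note: the resource-tracker audit above reports nine actively managed instances, each holding the same placement allocation of 1 VCPU / 128 MB / 1 GB, and then a final resource view of used_vcpus=9, used_ram=1664MB, used_disk=9GB. Those totals follow directly from the nine allocations plus the host's 512 MB memory reservation (visible in the inventory data logged further below). A minimal sketch of that arithmetic, illustrative only and not Nova's ResourceTracker code:

```python
# Minimal sketch (not Nova's ResourceTracker) reproducing the
# "Final resource view" figures from the audit above. All numbers
# are taken from the surrounding log entries; helper names are
# illustrative only.

RESERVED_MEMORY_MB = 512   # matches MEMORY_MB 'reserved' in the logged inventory
RESERVED_DISK_GB = 0       # matches DISK_GB 'reserved'

# Each of the nine tracked instances holds the same allocation.
instance_allocations = [{'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}] * 9

def final_resource_view(allocations):
    """Sum per-instance usage on top of the host reservations."""
    used_vcpus = sum(a['VCPU'] for a in allocations)
    used_ram_mb = RESERVED_MEMORY_MB + sum(a['MEMORY_MB'] for a in allocations)
    used_disk_gb = RESERVED_DISK_GB + sum(a['DISK_GB'] for a in allocations)
    return used_vcpus, used_ram_mb, used_disk_gb

print(final_resource_view(instance_allocations))  # -> (9, 1664, 9)
```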
[ 1159.622161] env[69784]: INFO nova.scheduler.client.report [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Deleted allocations for instance 0d0d1503-5522-4c0d-9096-2f25ed0fd7df [ 1159.652426] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Lock "0d0d1503-5522-4c0d-9096-2f25ed0fd7df" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 476.049s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1159.653592] env[69784]: DEBUG oslo_concurrency.lockutils [None req-23eea3cd-ea85-4c3b-97e3-1393582155b4 tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Lock "0d0d1503-5522-4c0d-9096-2f25ed0fd7df" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 274.259s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1159.653812] env[69784]: DEBUG oslo_concurrency.lockutils [None req-23eea3cd-ea85-4c3b-97e3-1393582155b4 tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquiring lock "0d0d1503-5522-4c0d-9096-2f25ed0fd7df-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1159.654037] env[69784]: DEBUG oslo_concurrency.lockutils [None req-23eea3cd-ea85-4c3b-97e3-1393582155b4 tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Lock "0d0d1503-5522-4c0d-9096-2f25ed0fd7df-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1159.654423] env[69784]: DEBUG oslo_concurrency.lockutils [None req-23eea3cd-ea85-4c3b-97e3-1393582155b4 tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Lock "0d0d1503-5522-4c0d-9096-2f25ed0fd7df-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1159.662499] env[69784]: INFO nova.compute.manager [None req-23eea3cd-ea85-4c3b-97e3-1393582155b4 tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Terminating instance [ 1159.662800] env[69784]: DEBUG nova.compute.manager [None req-23eea3cd-ea85-4c3b-97e3-1393582155b4 tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Start destroying the instance on the hypervisor. 
{{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1159.664016] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-23eea3cd-ea85-4c3b-97e3-1393582155b4 tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1159.664016] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a09ee328-b206-4f59-bf9d-5662e7b82433 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.674719] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d076bab-6266-4cb8-b61c-c83bd45d6066 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.688038] env[69784]: DEBUG nova.compute.manager [None req-1639275f-86ce-4421-96f5-8f6c90d79571 tempest-InstanceActionsV221TestJSON-1929562794 tempest-InstanceActionsV221TestJSON-1929562794-project-member] [instance: 34f889c0-3105-49d3-a2f0-9cf250ab3c4b] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1159.711765] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-23eea3cd-ea85-4c3b-97e3-1393582155b4 tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0d0d1503-5522-4c0d-9096-2f25ed0fd7df could not be found. [ 1159.712023] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-23eea3cd-ea85-4c3b-97e3-1393582155b4 tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1159.712222] env[69784]: INFO nova.compute.manager [None req-23eea3cd-ea85-4c3b-97e3-1393582155b4 tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1159.712481] env[69784]: DEBUG oslo.service.loopingcall [None req-23eea3cd-ea85-4c3b-97e3-1393582155b4 tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1159.715311] env[69784]: DEBUG nova.compute.manager [-] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1159.715416] env[69784]: DEBUG nova.network.neutron [-] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1159.717999] env[69784]: DEBUG nova.compute.manager [None req-1639275f-86ce-4421-96f5-8f6c90d79571 tempest-InstanceActionsV221TestJSON-1929562794 tempest-InstanceActionsV221TestJSON-1929562794-project-member] [instance: 34f889c0-3105-49d3-a2f0-9cf250ab3c4b] Instance disappeared before build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1159.746129] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1639275f-86ce-4421-96f5-8f6c90d79571 tempest-InstanceActionsV221TestJSON-1929562794 tempest-InstanceActionsV221TestJSON-1929562794-project-member] Lock "34f889c0-3105-49d3-a2f0-9cf250ab3c4b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 232.719s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1159.749322] env[69784]: DEBUG nova.network.neutron [-] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1159.759689] env[69784]: DEBUG nova.compute.manager [None req-154224b9-759e-4e2f-a153-09c5b57850bc tempest-SecurityGroupsTestJSON-1889373630 tempest-SecurityGroupsTestJSON-1889373630-project-member] [instance: 322c8dbc-6c6f-4343-ba39-8301c886210c] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1159.762812] env[69784]: INFO nova.compute.manager [-] [instance: 0d0d1503-5522-4c0d-9096-2f25ed0fd7df] Took 0.05 seconds to deallocate network for instance. [ 1159.791823] env[69784]: DEBUG nova.compute.manager [None req-154224b9-759e-4e2f-a153-09c5b57850bc tempest-SecurityGroupsTestJSON-1889373630 tempest-SecurityGroupsTestJSON-1889373630-project-member] [instance: 322c8dbc-6c6f-4343-ba39-8301c886210c] Instance disappeared before build. 
{{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1159.807851] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51f39bb3-2d00-4446-a004-19552a016278 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.820672] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d397f911-e94c-442a-a6d3-81371fd088af {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.824789] env[69784]: DEBUG oslo_concurrency.lockutils [None req-154224b9-759e-4e2f-a153-09c5b57850bc tempest-SecurityGroupsTestJSON-1889373630 tempest-SecurityGroupsTestJSON-1889373630-project-member] Lock "322c8dbc-6c6f-4343-ba39-8301c886210c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 227.144s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1159.854135] env[69784]: DEBUG nova.compute.manager [None req-fc063c46-822a-4452-ba02-bfab27bdf9f0 tempest-AttachInterfacesUnderV243Test-814149851 tempest-AttachInterfacesUnderV243Test-814149851-project-member] [instance: f8902bba-d976-47c2-b034-a9438b6c467b] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1159.858023] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e03e3f58-5092-49d8-9a31-1dc81c6d8973 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.861928] env[69784]: DEBUG oslo_concurrency.lockutils [None req-23eea3cd-ea85-4c3b-97e3-1393582155b4 tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Lock "0d0d1503-5522-4c0d-9096-2f25ed0fd7df" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.208s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1159.867632] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99f32497-a2b5-4f98-b19b-13dc38d6953b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.882416] env[69784]: DEBUG nova.compute.provider_tree [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1159.883852] env[69784]: DEBUG nova.compute.manager [None req-fc063c46-822a-4452-ba02-bfab27bdf9f0 tempest-AttachInterfacesUnderV243Test-814149851 tempest-AttachInterfacesUnderV243Test-814149851-project-member] [instance: f8902bba-d976-47c2-b034-a9438b6c467b] Instance disappeared before build. 
{{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1159.890527] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1159.904326] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69784) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1159.904520] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.751s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1159.905646] env[69784]: DEBUG oslo_concurrency.lockutils [None req-fc063c46-822a-4452-ba02-bfab27bdf9f0 tempest-AttachInterfacesUnderV243Test-814149851 tempest-AttachInterfacesUnderV243Test-814149851-project-member] Lock "f8902bba-d976-47c2-b034-a9438b6c467b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 225.801s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1159.917957] env[69784]: DEBUG nova.compute.manager [None req-317be39b-60ee-4d93-9138-bb2863693bf2 tempest-MultipleCreateTestJSON-1744311742 tempest-MultipleCreateTestJSON-1744311742-project-member] [instance: 4091d39a-80ff-43f9-8194-e995838ecb0d] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1159.940692] env[69784]: DEBUG nova.compute.manager [None req-317be39b-60ee-4d93-9138-bb2863693bf2 tempest-MultipleCreateTestJSON-1744311742 tempest-MultipleCreateTestJSON-1744311742-project-member] [instance: 4091d39a-80ff-43f9-8194-e995838ecb0d] Instance disappeared before build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1159.963198] env[69784]: DEBUG oslo_concurrency.lockutils [None req-317be39b-60ee-4d93-9138-bb2863693bf2 tempest-MultipleCreateTestJSON-1744311742 tempest-MultipleCreateTestJSON-1744311742-project-member] Lock "4091d39a-80ff-43f9-8194-e995838ecb0d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 225.645s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1159.975249] env[69784]: DEBUG nova.compute.manager [None req-317be39b-60ee-4d93-9138-bb2863693bf2 tempest-MultipleCreateTestJSON-1744311742 tempest-MultipleCreateTestJSON-1744311742-project-member] [instance: 098647d8-eb8e-4494-8458-857e152e0ff8] Starting instance... 
{{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1159.998731] env[69784]: DEBUG nova.compute.manager [None req-317be39b-60ee-4d93-9138-bb2863693bf2 tempest-MultipleCreateTestJSON-1744311742 tempest-MultipleCreateTestJSON-1744311742-project-member] [instance: 098647d8-eb8e-4494-8458-857e152e0ff8] Instance disappeared before build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1160.020303] env[69784]: DEBUG oslo_concurrency.lockutils [None req-317be39b-60ee-4d93-9138-bb2863693bf2 tempest-MultipleCreateTestJSON-1744311742 tempest-MultipleCreateTestJSON-1744311742-project-member] Lock "098647d8-eb8e-4494-8458-857e152e0ff8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 225.673s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1160.028157] env[69784]: DEBUG nova.compute.manager [None req-1b74c4f0-879a-41dd-9de4-78c434f4e000 tempest-AttachInterfacesTestJSON-1468351673 tempest-AttachInterfacesTestJSON-1468351673-project-member] [instance: 210a8b8c-13f0-4cb9-9d92-2b018291011b] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1160.050697] env[69784]: DEBUG nova.compute.manager [None req-1b74c4f0-879a-41dd-9de4-78c434f4e000 tempest-AttachInterfacesTestJSON-1468351673 tempest-AttachInterfacesTestJSON-1468351673-project-member] [instance: 210a8b8c-13f0-4cb9-9d92-2b018291011b] Instance disappeared before build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1160.072890] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1b74c4f0-879a-41dd-9de4-78c434f4e000 tempest-AttachInterfacesTestJSON-1468351673 tempest-AttachInterfacesTestJSON-1468351673-project-member] Lock "210a8b8c-13f0-4cb9-9d92-2b018291011b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 223.141s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1160.084795] env[69784]: DEBUG nova.compute.manager [None req-2c0f3237-8f24-482b-b118-b21024976172 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4f0df17c-a95e-424c-8444-8cea31190bae] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1160.109034] env[69784]: DEBUG nova.compute.manager [None req-2c0f3237-8f24-482b-b118-b21024976172 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4f0df17c-a95e-424c-8444-8cea31190bae] Instance disappeared before build. 
{{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1160.129493] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2c0f3237-8f24-482b-b118-b21024976172 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Lock "4f0df17c-a95e-424c-8444-8cea31190bae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 211.520s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1160.141096] env[69784]: DEBUG nova.compute.manager [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1160.187978] env[69784]: DEBUG oslo_concurrency.lockutils [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1160.188317] env[69784]: DEBUG oslo_concurrency.lockutils [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1160.189628] env[69784]: INFO nova.compute.claims [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1160.556812] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f1a1091-0daa-4284-8843-6ad11385a0cf {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.564036] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ed1cd74-6f01-44de-8b67-39c321ed8e86 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.594430] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4019a4cc-375d-4d45-b134-2570c7ae8aab {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.602052] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5514de4a-a589-42d0-9ced-99fc2eb69f54 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.616399] env[69784]: DEBUG nova.compute.provider_tree [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1160.625375] 
env[69784]: DEBUG nova.scheduler.client.report [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1160.640990] env[69784]: DEBUG oslo_concurrency.lockutils [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.453s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1160.641512] env[69784]: DEBUG nova.compute.manager [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Start building networks asynchronously for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1160.676055] env[69784]: DEBUG nova.compute.utils [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1160.677654] env[69784]: DEBUG nova.compute.manager [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1160.677845] env[69784]: DEBUG nova.network.neutron [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1160.686940] env[69784]: DEBUG nova.compute.manager [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1160.749532] env[69784]: DEBUG nova.compute.manager [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Start spawning the instance on the hypervisor. 
{{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1160.753261] env[69784]: DEBUG nova.policy [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c93b274686c34049be1b37ef70656616', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c0126dda00a44838ac749dee6f266970', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 1160.775532] env[69784]: DEBUG nova.virt.hardware [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1160.775801] env[69784]: DEBUG nova.virt.hardware [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1160.775960] env[69784]: DEBUG nova.virt.hardware [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1160.776160] env[69784]: DEBUG nova.virt.hardware [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1160.776309] env[69784]: DEBUG nova.virt.hardware [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1160.776454] env[69784]: DEBUG nova.virt.hardware [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1160.776659] env[69784]: DEBUG nova.virt.hardware [None 
req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1160.776821] env[69784]: DEBUG nova.virt.hardware [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1160.777036] env[69784]: DEBUG nova.virt.hardware [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1160.777223] env[69784]: DEBUG nova.virt.hardware [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1160.777398] env[69784]: DEBUG nova.virt.hardware [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1160.778303] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-975b80fb-03ca-4372-a24b-9ca050230bcd {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.786127] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abfd635f-289b-4dc3-a133-fc0a9f16a866 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.899536] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1160.899770] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1160.899919] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Starting heal instance info cache {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1160.900053] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Rebuilding the list of instances to heal {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1160.923331] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Skipping network cache update for instance because it is Building. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1160.923331] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1160.923331] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1160.923331] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1160.923331] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1160.923595] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1160.923595] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1160.923595] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1160.923595] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1160.923595] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1160.923732] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Didn't find any instances for network info cache update. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1160.923897] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1161.120980] env[69784]: DEBUG nova.network.neutron [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Successfully created port: 83c9f3fd-6934-4f57-8c91-4a93df07a691 {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1161.322255] env[69784]: DEBUG oslo_concurrency.lockutils [None req-75e733a1-5e82-45ff-aa5d-97a27041c255 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquiring lock "694e2a62-5f2e-475d-9356-a66651c3e5e2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1161.750542] env[69784]: DEBUG nova.compute.manager [req-d8288cab-73cf-491d-8ccc-f87444361fb8 req-fc8997f8-8258-486f-85d5-b2c297db200a service nova] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Received event network-vif-plugged-83c9f3fd-6934-4f57-8c91-4a93df07a691 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1161.750733] env[69784]: DEBUG oslo_concurrency.lockutils [req-d8288cab-73cf-491d-8ccc-f87444361fb8 req-fc8997f8-8258-486f-85d5-b2c297db200a service nova] Acquiring lock "694e2a62-5f2e-475d-9356-a66651c3e5e2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1161.750935] env[69784]: DEBUG oslo_concurrency.lockutils [req-d8288cab-73cf-491d-8ccc-f87444361fb8 req-fc8997f8-8258-486f-85d5-b2c297db200a service nova] Lock "694e2a62-5f2e-475d-9356-a66651c3e5e2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1161.751116] env[69784]: DEBUG oslo_concurrency.lockutils [req-d8288cab-73cf-491d-8ccc-f87444361fb8 req-fc8997f8-8258-486f-85d5-b2c297db200a service nova] Lock "694e2a62-5f2e-475d-9356-a66651c3e5e2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1161.751281] env[69784]: DEBUG nova.compute.manager [req-d8288cab-73cf-491d-8ccc-f87444361fb8 req-fc8997f8-8258-486f-85d5-b2c297db200a service nova] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] No waiting events found dispatching network-vif-plugged-83c9f3fd-6934-4f57-8c91-4a93df07a691 {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1161.751435] env[69784]: WARNING nova.compute.manager [req-d8288cab-73cf-491d-8ccc-f87444361fb8 req-fc8997f8-8258-486f-85d5-b2c297db200a service nova] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Received unexpected event network-vif-plugged-83c9f3fd-6934-4f57-8c91-4a93df07a691 for instance with vm_state building and task_state deleting. 
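Editor's note: the network-vif-plugged messages above show the usual wait/dispatch flow for external instance events: the compute manager would normally register a waiter before plugging the VIF, and when the event from Neutron arrives with no waiter registered (here because the instance is already in task_state deleting), it is logged as unexpected. A hedged sketch of that general pattern, an illustrative stand-in rather than Nova's InstanceEvents implementation (the EventWaiter name is hypothetical):

```python
# Illustrative wait/dispatch pattern for external events such as
# "network-vif-plugged-<port_id>". Not Nova's InstanceEvents code.
import threading

class EventWaiter:
    def __init__(self):
        self._lock = threading.Lock()
        self._events = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare(self, instance_uuid, event_name):
        """Register interest before triggering the action (e.g. plugging a VIF)."""
        ev = threading.Event()
        with self._lock:
            self._events[(instance_uuid, event_name)] = ev
        return ev

    def dispatch(self, instance_uuid, event_name):
        """Called when the external event (here, from Neutron) arrives."""
        with self._lock:
            ev = self._events.pop((instance_uuid, event_name), None)
        if ev is None:
            # Nobody is waiting, so the event is reported as unexpected,
            # as in the WARNING above for the instance being deleted.
            print(f"unexpected event {event_name} for {instance_uuid}")
        else:
            ev.set()

waiter = EventWaiter()
# Event arriving with no registered waiter, mirroring the WARNING above:
waiter.dispatch("694e2a62-5f2e-475d-9356-a66651c3e5e2",
                "network-vif-plugged-83c9f3fd-6934-4f57-8c91-4a93df07a691")
```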
[ 1161.824451] env[69784]: DEBUG nova.network.neutron [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Successfully updated port: 83c9f3fd-6934-4f57-8c91-4a93df07a691 {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1161.838021] env[69784]: DEBUG oslo_concurrency.lockutils [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquiring lock "refresh_cache-694e2a62-5f2e-475d-9356-a66651c3e5e2" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1161.838226] env[69784]: DEBUG oslo_concurrency.lockutils [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquired lock "refresh_cache-694e2a62-5f2e-475d-9356-a66651c3e5e2" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1161.838383] env[69784]: DEBUG nova.network.neutron [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1161.887621] env[69784]: DEBUG nova.network.neutron [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Instance cache missing network info. 
{{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1162.290309] env[69784]: DEBUG nova.network.neutron [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Updating instance_info_cache with network_info: [{"id": "83c9f3fd-6934-4f57-8c91-4a93df07a691", "address": "fa:16:3e:8b:76:43", "network": {"id": "089b8552-21a7-446a-93e8-08dfd5616726", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-692750947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0126dda00a44838ac749dee6f266970", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "75ff81f9-72b2-4e58-a8d8-5699907f7459", "external-id": "nsx-vlan-transportzone-978", "segmentation_id": 978, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83c9f3fd-69", "ovs_interfaceid": "83c9f3fd-6934-4f57-8c91-4a93df07a691", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1162.306398] env[69784]: DEBUG oslo_concurrency.lockutils [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Releasing lock "refresh_cache-694e2a62-5f2e-475d-9356-a66651c3e5e2" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1162.306707] env[69784]: DEBUG nova.compute.manager [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Instance network_info: |[{"id": "83c9f3fd-6934-4f57-8c91-4a93df07a691", "address": "fa:16:3e:8b:76:43", "network": {"id": "089b8552-21a7-446a-93e8-08dfd5616726", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-692750947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0126dda00a44838ac749dee6f266970", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "75ff81f9-72b2-4e58-a8d8-5699907f7459", "external-id": "nsx-vlan-transportzone-978", "segmentation_id": 978, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83c9f3fd-69", "ovs_interfaceid": "83c9f3fd-6934-4f57-8c91-4a93df07a691", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1971}} [ 1162.307118] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:76:43', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '75ff81f9-72b2-4e58-a8d8-5699907f7459', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '83c9f3fd-6934-4f57-8c91-4a93df07a691', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1162.314530] env[69784]: DEBUG oslo.service.loopingcall [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1162.315017] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1162.315260] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-04ed35aa-e279-422f-974e-8fb02c1bda46 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.335286] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1162.335286] env[69784]: value = "task-3467104" [ 1162.335286] env[69784]: _type = "Task" [ 1162.335286] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.342781] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467104, 'name': CreateVM_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.845749] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467104, 'name': CreateVM_Task, 'duration_secs': 0.283789} completed successfully. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.845919] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1162.846549] env[69784]: DEBUG oslo_concurrency.lockutils [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1162.846723] env[69784]: DEBUG oslo_concurrency.lockutils [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1162.847053] env[69784]: DEBUG oslo_concurrency.lockutils [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1162.847299] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b5566f1-b402-4532-8fb1-57bdc2f81683 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.851729] env[69784]: DEBUG oslo_vmware.api [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Waiting for the task: (returnval){ [ 1162.851729] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52b31c51-dfd2-1802-860b-79e36229567f" [ 1162.851729] env[69784]: _type = "Task" [ 1162.851729] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.861238] env[69784]: DEBUG oslo_vmware.api [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52b31c51-dfd2-1802-860b-79e36229567f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.363272] env[69784]: DEBUG oslo_concurrency.lockutils [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1163.363611] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1163.363917] env[69784]: DEBUG oslo_concurrency.lockutils [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1163.821855] env[69784]: DEBUG nova.compute.manager [req-37da698d-fc93-449d-8e70-0eac38aea419 req-2082414d-6b7a-471d-b5f0-14cb33ad8892 service nova] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Received event network-changed-83c9f3fd-6934-4f57-8c91-4a93df07a691 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1163.822157] env[69784]: DEBUG nova.compute.manager [req-37da698d-fc93-449d-8e70-0eac38aea419 req-2082414d-6b7a-471d-b5f0-14cb33ad8892 service nova] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Refreshing instance network info cache due to event network-changed-83c9f3fd-6934-4f57-8c91-4a93df07a691. {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1163.822420] env[69784]: DEBUG oslo_concurrency.lockutils [req-37da698d-fc93-449d-8e70-0eac38aea419 req-2082414d-6b7a-471d-b5f0-14cb33ad8892 service nova] Acquiring lock "refresh_cache-694e2a62-5f2e-475d-9356-a66651c3e5e2" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1163.822638] env[69784]: DEBUG oslo_concurrency.lockutils [req-37da698d-fc93-449d-8e70-0eac38aea419 req-2082414d-6b7a-471d-b5f0-14cb33ad8892 service nova] Acquired lock "refresh_cache-694e2a62-5f2e-475d-9356-a66651c3e5e2" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1163.822864] env[69784]: DEBUG nova.network.neutron [req-37da698d-fc93-449d-8e70-0eac38aea419 req-2082414d-6b7a-471d-b5f0-14cb33ad8892 service nova] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Refreshing network info cache for port 83c9f3fd-6934-4f57-8c91-4a93df07a691 {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1164.162441] env[69784]: DEBUG nova.network.neutron [req-37da698d-fc93-449d-8e70-0eac38aea419 req-2082414d-6b7a-471d-b5f0-14cb33ad8892 service nova] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Updated VIF entry in instance network info cache for port 83c9f3fd-6934-4f57-8c91-4a93df07a691. 
{{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1164.162865] env[69784]: DEBUG nova.network.neutron [req-37da698d-fc93-449d-8e70-0eac38aea419 req-2082414d-6b7a-471d-b5f0-14cb33ad8892 service nova] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Updating instance_info_cache with network_info: [{"id": "83c9f3fd-6934-4f57-8c91-4a93df07a691", "address": "fa:16:3e:8b:76:43", "network": {"id": "089b8552-21a7-446a-93e8-08dfd5616726", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-692750947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0126dda00a44838ac749dee6f266970", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "75ff81f9-72b2-4e58-a8d8-5699907f7459", "external-id": "nsx-vlan-transportzone-978", "segmentation_id": 978, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83c9f3fd-69", "ovs_interfaceid": "83c9f3fd-6934-4f57-8c91-4a93df07a691", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1164.172751] env[69784]: DEBUG oslo_concurrency.lockutils [req-37da698d-fc93-449d-8e70-0eac38aea419 req-2082414d-6b7a-471d-b5f0-14cb33ad8892 service nova] Releasing lock "refresh_cache-694e2a62-5f2e-475d-9356-a66651c3e5e2" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1208.438198] env[69784]: WARNING oslo_vmware.rw_handles [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1208.438198] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1208.438198] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1208.438198] env[69784]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1208.438198] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1208.438198] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 1208.438198] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1208.438198] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1208.438198] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1208.438198] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1208.438198] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1208.438198] env[69784]: ERROR oslo_vmware.rw_handles [ 1208.438786] env[69784]: DEBUG nova.virt.vmwareapi.images [None 
req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/ff71e4fa-f083-4c42-990c-484ac21aeeab/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1208.441235] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1208.441531] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Copying Virtual Disk [datastore1] vmware_temp/ff71e4fa-f083-4c42-990c-484ac21aeeab/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] vmware_temp/ff71e4fa-f083-4c42-990c-484ac21aeeab/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1208.441883] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e2268608-45d5-4bbc-ad9b-de8a2d751a54 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.450638] env[69784]: DEBUG oslo_vmware.api [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Waiting for the task: (returnval){ [ 1208.450638] env[69784]: value = "task-3467105" [ 1208.450638] env[69784]: _type = "Task" [ 1208.450638] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.459351] env[69784]: DEBUG oslo_vmware.api [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Task: {'id': task-3467105, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.961460] env[69784]: DEBUG oslo_vmware.exceptions [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Fault InvalidArgument not matched. 
{{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1208.961799] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1208.962462] env[69784]: ERROR nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1208.962462] env[69784]: Faults: ['InvalidArgument'] [ 1208.962462] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Traceback (most recent call last): [ 1208.962462] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1208.962462] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] yield resources [ 1208.962462] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1208.962462] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] self.driver.spawn(context, instance, image_meta, [ 1208.962462] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1208.962462] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1208.962462] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1208.962462] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] self._fetch_image_if_missing(context, vi) [ 1208.962462] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1208.962836] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] image_cache(vi, tmp_image_ds_loc) [ 1208.962836] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1208.962836] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] vm_util.copy_virtual_disk( [ 1208.962836] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1208.962836] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] session._wait_for_task(vmdk_copy_task) [ 1208.962836] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1208.962836] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] return self.wait_for_task(task_ref) [ 1208.962836] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1208.962836] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] return evt.wait() [ 1208.962836] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1208.962836] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] result = hub.switch() [ 1208.962836] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1208.962836] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] return self.greenlet.switch() [ 1208.963223] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1208.963223] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] self.f(*self.args, **self.kw) [ 1208.963223] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1208.963223] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] raise exceptions.translate_fault(task_info.error) [ 1208.963223] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1208.963223] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Faults: ['InvalidArgument'] [ 1208.963223] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] [ 1208.963223] env[69784]: INFO nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Terminating instance [ 1208.964390] env[69784]: DEBUG oslo_concurrency.lockutils [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1208.964597] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1208.965058] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-320f0cbd-d04e-4792-a5b9-3ff6241dfeb0 {{(pid=69784) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.968267] env[69784]: DEBUG nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1208.968465] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1208.969238] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09dff0ed-8abe-47c0-85a2-2abf5beadf34 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.972936] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1208.973213] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1208.974173] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c942572f-e991-4105-8e8d-972066fc20c0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.978349] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1208.978864] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-327fb675-c7bf-47e4-b73c-a62ee1821436 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.981205] env[69784]: DEBUG oslo_vmware.api [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Waiting for the task: (returnval){ [ 1208.981205] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52a29d8e-c2d9-2ae3-7f50-f18f25d379fe" [ 1208.981205] env[69784]: _type = "Task" [ 1208.981205] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.988868] env[69784]: DEBUG oslo_vmware.api [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52a29d8e-c2d9-2ae3-7f50-f18f25d379fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.054365] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1209.054963] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1209.055209] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Deleting the datastore file [datastore1] 65601835-8d30-46b8-b928-b3912d058c6e {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1209.055509] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3d0fad77-2f12-4736-88e8-6fc90874dc06 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.061274] env[69784]: DEBUG oslo_vmware.api [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Waiting for the task: (returnval){ [ 1209.061274] env[69784]: value = "task-3467107" [ 1209.061274] env[69784]: _type = "Task" [ 1209.061274] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.069042] env[69784]: DEBUG oslo_vmware.api [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Task: {'id': task-3467107, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.492853] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1209.493750] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Creating directory with path [datastore1] vmware_temp/d39445f8-2824-4efa-b0cf-ecfb2f069202/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1209.493750] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0bde843f-a1c9-491a-a730-2b9dca2a8dc2 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.505761] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Created directory with path [datastore1] vmware_temp/d39445f8-2824-4efa-b0cf-ecfb2f069202/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1209.505974] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Fetch image to [datastore1] vmware_temp/d39445f8-2824-4efa-b0cf-ecfb2f069202/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1209.506328] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/d39445f8-2824-4efa-b0cf-ecfb2f069202/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1209.506917] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-757fbc55-427b-4192-8308-aa4a6bfff4ff {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.514339] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca62c157-fa03-4339-a09d-3b88b801c23e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.525652] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58f758a7-5e7c-4726-beec-24cced12bdd3 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.559552] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f2c999e-1b29-4634-9f14-fe32abbd0b6d {{(pid=69784) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.571527] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b9aa3265-2eb9-4fd4-ba16-0b142c073366 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.573369] env[69784]: DEBUG oslo_vmware.api [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Task: {'id': task-3467107, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067661} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.573609] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1209.573789] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1209.573970] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1209.574326] env[69784]: INFO nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1209.576526] env[69784]: DEBUG nova.compute.claims [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1209.576705] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1209.576919] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1209.595670] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1209.649533] env[69784]: DEBUG oslo_vmware.rw_handles [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d39445f8-2824-4efa-b0cf-ecfb2f069202/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1209.710912] env[69784]: DEBUG oslo_vmware.rw_handles [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Completed reading data from the image iterator. {{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1209.711113] env[69784]: DEBUG oslo_vmware.rw_handles [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d39445f8-2824-4efa-b0cf-ecfb2f069202/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1209.988158] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b9644ca-7775-4568-a399-4aced5c46eaa {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.996260] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77b0943b-adc8-435a-b58e-0588eb307480 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.027887] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ecf6918-1e94-4dec-a1be-5080499c0740 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.036206] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27a7355e-5120-4a7b-ac12-8ec70cc6b252 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.051720] env[69784]: DEBUG nova.compute.provider_tree [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1210.063926] env[69784]: DEBUG nova.scheduler.client.report [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1210.081905] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.505s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1210.083037] env[69784]: ERROR nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1210.083037] env[69784]: Faults: ['InvalidArgument'] [ 1210.083037] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Traceback (most recent call last): [ 1210.083037] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance 
[ 1210.083037] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] self.driver.spawn(context, instance, image_meta, [ 1210.083037] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1210.083037] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1210.083037] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1210.083037] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] self._fetch_image_if_missing(context, vi) [ 1210.083037] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1210.083037] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] image_cache(vi, tmp_image_ds_loc) [ 1210.083037] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1210.083470] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] vm_util.copy_virtual_disk( [ 1210.083470] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1210.083470] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] session._wait_for_task(vmdk_copy_task) [ 1210.083470] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1210.083470] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] return self.wait_for_task(task_ref) [ 1210.083470] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1210.083470] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] return evt.wait() [ 1210.083470] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1210.083470] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] result = hub.switch() [ 1210.083470] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1210.083470] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] return self.greenlet.switch() [ 1210.083470] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1210.083470] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] self.f(*self.args, **self.kw) [ 1210.083843] env[69784]: ERROR nova.compute.manager [instance: 
65601835-8d30-46b8-b928-b3912d058c6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1210.083843] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] raise exceptions.translate_fault(task_info.error) [ 1210.083843] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1210.083843] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Faults: ['InvalidArgument'] [ 1210.083843] env[69784]: ERROR nova.compute.manager [instance: 65601835-8d30-46b8-b928-b3912d058c6e] [ 1210.083843] env[69784]: DEBUG nova.compute.utils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1210.084736] env[69784]: DEBUG nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Build of instance 65601835-8d30-46b8-b928-b3912d058c6e was re-scheduled: A specified parameter was not correct: fileType [ 1210.084736] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1210.085137] env[69784]: DEBUG nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1210.085313] env[69784]: DEBUG nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1210.085493] env[69784]: DEBUG nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1210.085666] env[69784]: DEBUG nova.network.neutron [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1210.400044] env[69784]: DEBUG nova.network.neutron [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1210.414627] env[69784]: INFO nova.compute.manager [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Took 0.33 seconds to deallocate network for instance. [ 1210.533603] env[69784]: INFO nova.scheduler.client.report [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Deleted allocations for instance 65601835-8d30-46b8-b928-b3912d058c6e [ 1210.556505] env[69784]: DEBUG oslo_concurrency.lockutils [None req-46ce3f76-8385-4320-af51-d11731c3933a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Lock "65601835-8d30-46b8-b928-b3912d058c6e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 526.871s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1210.557727] env[69784]: DEBUG oslo_concurrency.lockutils [None req-3d50f154-75c0-4800-8063-9edc8da9980a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Lock "65601835-8d30-46b8-b928-b3912d058c6e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 325.051s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1210.557954] env[69784]: DEBUG oslo_concurrency.lockutils [None req-3d50f154-75c0-4800-8063-9edc8da9980a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Acquiring lock "65601835-8d30-46b8-b928-b3912d058c6e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1210.558181] env[69784]: DEBUG oslo_concurrency.lockutils [None req-3d50f154-75c0-4800-8063-9edc8da9980a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Lock "65601835-8d30-46b8-b928-b3912d058c6e-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1210.558339] env[69784]: DEBUG oslo_concurrency.lockutils [None req-3d50f154-75c0-4800-8063-9edc8da9980a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Lock "65601835-8d30-46b8-b928-b3912d058c6e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1210.565216] env[69784]: INFO nova.compute.manager [None req-3d50f154-75c0-4800-8063-9edc8da9980a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Terminating instance [ 1210.569311] env[69784]: DEBUG nova.compute.manager [None req-3d50f154-75c0-4800-8063-9edc8da9980a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1210.570123] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-3d50f154-75c0-4800-8063-9edc8da9980a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1210.570123] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e2963546-3abc-4da9-9ac7-9dd787ee51f9 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.572877] env[69784]: DEBUG nova.compute.manager [None req-2e58f984-c062-45e2-bcc3-e1ac851fca6b tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: 5892a6e6-2829-48a0-877d-6c4307861a05] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1210.582201] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcd399d4-3114-4d60-952c-03a9673d0798 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.598714] env[69784]: DEBUG nova.compute.manager [None req-2e58f984-c062-45e2-bcc3-e1ac851fca6b tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] [instance: 5892a6e6-2829-48a0-877d-6c4307861a05] Instance disappeared before build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1210.610862] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-3d50f154-75c0-4800-8063-9edc8da9980a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 65601835-8d30-46b8-b928-b3912d058c6e could not be found. 
[ 1210.611081] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-3d50f154-75c0-4800-8063-9edc8da9980a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1210.611254] env[69784]: INFO nova.compute.manager [None req-3d50f154-75c0-4800-8063-9edc8da9980a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1210.611498] env[69784]: DEBUG oslo.service.loopingcall [None req-3d50f154-75c0-4800-8063-9edc8da9980a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1210.611736] env[69784]: DEBUG nova.compute.manager [-] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1210.611874] env[69784]: DEBUG nova.network.neutron [-] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1210.633000] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2e58f984-c062-45e2-bcc3-e1ac851fca6b tempest-MigrationsAdminTest-499229627 tempest-MigrationsAdminTest-499229627-project-member] Lock "5892a6e6-2829-48a0-877d-6c4307861a05" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 197.786s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1210.643908] env[69784]: DEBUG nova.compute.manager [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1210.647654] env[69784]: DEBUG nova.network.neutron [-] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1210.663016] env[69784]: INFO nova.compute.manager [-] [instance: 65601835-8d30-46b8-b928-b3912d058c6e] Took 0.05 seconds to deallocate network for instance. 
[ 1210.732096] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1210.732096] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1210.733314] env[69784]: INFO nova.compute.claims [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1210.788030] env[69784]: DEBUG oslo_concurrency.lockutils [None req-3d50f154-75c0-4800-8063-9edc8da9980a tempest-ListServersNegativeTestJSON-593172322 tempest-ListServersNegativeTestJSON-593172322-project-member] Lock "65601835-8d30-46b8-b928-b3912d058c6e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.229s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1211.117743] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ce761a-e02f-470f-b45d-2cc34eba0258 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.126492] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57fbd050-a859-4465-bc87-18f9dc0a61ba {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.157149] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10278aed-5a4c-4757-8ffc-dec4608f9397 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.165218] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ca85313-7323-49b1-a8bf-d127b29f06e6 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.178563] env[69784]: DEBUG nova.compute.provider_tree [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1211.187723] env[69784]: DEBUG nova.scheduler.client.report [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1211.206974] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.476s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1211.207539] env[69784]: DEBUG nova.compute.manager [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Start building networks asynchronously for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1211.256176] env[69784]: DEBUG nova.compute.utils [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1211.257812] env[69784]: DEBUG nova.compute.manager [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1211.257924] env[69784]: DEBUG nova.network.neutron [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1211.269218] env[69784]: DEBUG nova.compute.manager [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Start building block device mappings for instance. 
{{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1211.329532] env[69784]: DEBUG nova.policy [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '18fabd386e8643a19c953d771ddef468', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e4ca4d6fef784169a26cf27307168456', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 1211.345702] env[69784]: DEBUG nova.compute.manager [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Start spawning the instance on the hypervisor. {{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1211.398095] env[69784]: DEBUG nova.virt.hardware [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1211.398636] env[69784]: DEBUG nova.virt.hardware [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1211.399067] env[69784]: DEBUG nova.virt.hardware [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1211.399784] env[69784]: DEBUG nova.virt.hardware [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1211.400053] env[69784]: DEBUG nova.virt.hardware [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 
tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1211.400232] env[69784]: DEBUG nova.virt.hardware [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1211.400455] env[69784]: DEBUG nova.virt.hardware [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1211.400619] env[69784]: DEBUG nova.virt.hardware [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1211.400789] env[69784]: DEBUG nova.virt.hardware [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1211.400955] env[69784]: DEBUG nova.virt.hardware [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1211.401171] env[69784]: DEBUG nova.virt.hardware [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1211.402472] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee461941-9434-4b93-9f3a-e34eecbcac9d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.416564] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61c768e7-1ba6-4b48-9f17-002ce66dcd22 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.776163] env[69784]: DEBUG nova.network.neutron [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Successfully created port: 9d07ffa5-4cf0-46b9-baf5-3ae61bda28b3 {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1211.840160] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running 
periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1212.774709] env[69784]: DEBUG nova.compute.manager [req-27cf94f0-239d-41bd-80e5-666b36e6004b req-a6cff234-05f1-4096-ae17-69da2ae1b1f9 service nova] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Received event network-vif-plugged-9d07ffa5-4cf0-46b9-baf5-3ae61bda28b3 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1212.774978] env[69784]: DEBUG oslo_concurrency.lockutils [req-27cf94f0-239d-41bd-80e5-666b36e6004b req-a6cff234-05f1-4096-ae17-69da2ae1b1f9 service nova] Acquiring lock "e6d05e25-386e-43d1-aec4-d62b9476891d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1212.775216] env[69784]: DEBUG oslo_concurrency.lockutils [req-27cf94f0-239d-41bd-80e5-666b36e6004b req-a6cff234-05f1-4096-ae17-69da2ae1b1f9 service nova] Lock "e6d05e25-386e-43d1-aec4-d62b9476891d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1212.775390] env[69784]: DEBUG oslo_concurrency.lockutils [req-27cf94f0-239d-41bd-80e5-666b36e6004b req-a6cff234-05f1-4096-ae17-69da2ae1b1f9 service nova] Lock "e6d05e25-386e-43d1-aec4-d62b9476891d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1212.775570] env[69784]: DEBUG nova.compute.manager [req-27cf94f0-239d-41bd-80e5-666b36e6004b req-a6cff234-05f1-4096-ae17-69da2ae1b1f9 service nova] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] No waiting events found dispatching network-vif-plugged-9d07ffa5-4cf0-46b9-baf5-3ae61bda28b3 {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1212.775749] env[69784]: WARNING nova.compute.manager [req-27cf94f0-239d-41bd-80e5-666b36e6004b req-a6cff234-05f1-4096-ae17-69da2ae1b1f9 service nova] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Received unexpected event network-vif-plugged-9d07ffa5-4cf0-46b9-baf5-3ae61bda28b3 for instance with vm_state building and task_state spawning. 
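The WARNING just above ("Received unexpected event network-vif-plugged-9d07ffa5-4cf0-46b9-baf5-3ae61bda28b3 for instance with vm_state building and task_state spawning") records that Neutron's plug notification reached the compute manager before the spawn path had registered anything to wait on it, so "No waiting events found" and the event is dispatched as unexpected. A minimal, hypothetical Python sketch of that register-before-trigger pattern is shown below; it is plain threading for illustration only, not Nova's actual InstanceEvents code, and the class and method names are invented for the example.

    import threading

    class EventWaiters:
        """Toy registry mapping event names to waiters (hypothetical helper)."""

        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}  # event name -> threading.Event

        def prepare(self, name):
            """Register interest in an event before triggering the operation."""
            with self._lock:
                return self._waiters.setdefault(name, threading.Event())

        def dispatch(self, name):
            """Deliver an incoming event; report whether anyone was waiting."""
            with self._lock:
                ev = self._waiters.get(name)
            if ev is None:
                print(f"unexpected event {name}: no waiter registered yet")
                return False
            ev.set()
            return True

    waiters = EventWaiters()
    # Event arrives before prepare(): nothing is waiting, so it is "unexpected",
    # mirroring the warning in the trace above.
    waiters.dispatch("network-vif-plugged-9d07ffa5")
    # Registering first, then dispatching, lets the waiter proceed normally.
    ev = waiters.prepare("network-vif-plugged-9d07ffa5")
    waiters.dispatch("network-vif-plugged-9d07ffa5")
    assert ev.is_set()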
[ 1213.181218] env[69784]: DEBUG nova.network.neutron [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Successfully updated port: 9d07ffa5-4cf0-46b9-baf5-3ae61bda28b3 {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1213.191685] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Acquiring lock "refresh_cache-e6d05e25-386e-43d1-aec4-d62b9476891d" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1213.191922] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Acquired lock "refresh_cache-e6d05e25-386e-43d1-aec4-d62b9476891d" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1213.192173] env[69784]: DEBUG nova.network.neutron [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1213.280101] env[69784]: DEBUG nova.network.neutron [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Instance cache missing network info. {{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1213.851275] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1213.852055] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1213.852239] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69784) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1213.872100] env[69784]: DEBUG nova.network.neutron [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Updating instance_info_cache with network_info: [{"id": "9d07ffa5-4cf0-46b9-baf5-3ae61bda28b3", "address": "fa:16:3e:97:2d:58", "network": {"id": "8dd4b51a-0332-4c6a-bf34-763bf5859802", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-732228050-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e4ca4d6fef784169a26cf27307168456", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d07ffa5-4c", "ovs_interfaceid": "9d07ffa5-4cf0-46b9-baf5-3ae61bda28b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1213.890303] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Releasing lock "refresh_cache-e6d05e25-386e-43d1-aec4-d62b9476891d" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1213.890950] env[69784]: DEBUG nova.compute.manager [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Instance network_info: |[{"id": "9d07ffa5-4cf0-46b9-baf5-3ae61bda28b3", "address": "fa:16:3e:97:2d:58", "network": {"id": "8dd4b51a-0332-4c6a-bf34-763bf5859802", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-732228050-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e4ca4d6fef784169a26cf27307168456", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d07ffa5-4c", "ovs_interfaceid": "9d07ffa5-4cf0-46b9-baf5-3ae61bda28b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1213.891099] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:97:2d:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9d07ffa5-4cf0-46b9-baf5-3ae61bda28b3', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1213.899833] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Creating folder: Project (e4ca4d6fef784169a26cf27307168456). Parent ref: group-v692547. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1213.900640] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-02a807df-8290-4371-bb94-2338aecca988 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.915303] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Created folder: Project (e4ca4d6fef784169a26cf27307168456) in parent group-v692547. [ 1213.916030] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Creating folder: Instances. Parent ref: group-v692608. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1213.916030] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c1b9f15d-c647-40c3-8191-aab47d0b1735 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.933019] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Created folder: Instances in parent group-v692608. [ 1213.933019] env[69784]: DEBUG oslo.service.loopingcall [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1213.933019] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1213.933019] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5e9100e2-9c7d-4262-9b7a-05f28c740762 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.968872] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1213.968872] env[69784]: value = "task-3467110" [ 1213.968872] env[69784]: _type = "Task" [ 1213.968872] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.977998] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467110, 'name': CreateVM_Task} progress is 5%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.482867] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467110, 'name': CreateVM_Task} progress is 99%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.985647] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467110, 'name': CreateVM_Task} progress is 99%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.093764] env[69784]: DEBUG nova.compute.manager [req-bc2d7460-2c67-4010-89da-d5057bef4d7d req-1b771119-7694-4ed6-bb3c-2d9a3e284d0f service nova] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Received event network-changed-9d07ffa5-4cf0-46b9-baf5-3ae61bda28b3 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1215.093764] env[69784]: DEBUG nova.compute.manager [req-bc2d7460-2c67-4010-89da-d5057bef4d7d req-1b771119-7694-4ed6-bb3c-2d9a3e284d0f service nova] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Refreshing instance network info cache due to event network-changed-9d07ffa5-4cf0-46b9-baf5-3ae61bda28b3. 
{{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1215.093764] env[69784]: DEBUG oslo_concurrency.lockutils [req-bc2d7460-2c67-4010-89da-d5057bef4d7d req-1b771119-7694-4ed6-bb3c-2d9a3e284d0f service nova] Acquiring lock "refresh_cache-e6d05e25-386e-43d1-aec4-d62b9476891d" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1215.093764] env[69784]: DEBUG oslo_concurrency.lockutils [req-bc2d7460-2c67-4010-89da-d5057bef4d7d req-1b771119-7694-4ed6-bb3c-2d9a3e284d0f service nova] Acquired lock "refresh_cache-e6d05e25-386e-43d1-aec4-d62b9476891d" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1215.094194] env[69784]: DEBUG nova.network.neutron [req-bc2d7460-2c67-4010-89da-d5057bef4d7d req-1b771119-7694-4ed6-bb3c-2d9a3e284d0f service nova] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Refreshing network info cache for port 9d07ffa5-4cf0-46b9-baf5-3ae61bda28b3 {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1215.250766] env[69784]: DEBUG oslo_concurrency.lockutils [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Acquiring lock "1473585c-f194-4396-b568-e8c1bc6d049b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1215.250766] env[69784]: DEBUG oslo_concurrency.lockutils [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Lock "1473585c-f194-4396-b568-e8c1bc6d049b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1215.480921] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467110, 'name': CreateVM_Task, 'duration_secs': 1.325663} completed successfully. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.481272] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1215.481921] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1215.482083] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1215.482438] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1215.482727] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5353db9b-00fb-4aa5-b029-08a122cada66 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.488915] env[69784]: DEBUG oslo_vmware.api [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Waiting for the task: (returnval){ [ 1215.488915] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]529d1886-85d4-aa8a-cc81-658b55a92946" [ 1215.488915] env[69784]: _type = "Task" [ 1215.488915] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.503100] env[69784]: DEBUG oslo_vmware.api [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]529d1886-85d4-aa8a-cc81-658b55a92946, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.587069] env[69784]: DEBUG nova.network.neutron [req-bc2d7460-2c67-4010-89da-d5057bef4d7d req-1b771119-7694-4ed6-bb3c-2d9a3e284d0f service nova] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Updated VIF entry in instance network info cache for port 9d07ffa5-4cf0-46b9-baf5-3ae61bda28b3. 
{{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1215.587069] env[69784]: DEBUG nova.network.neutron [req-bc2d7460-2c67-4010-89da-d5057bef4d7d req-1b771119-7694-4ed6-bb3c-2d9a3e284d0f service nova] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Updating instance_info_cache with network_info: [{"id": "9d07ffa5-4cf0-46b9-baf5-3ae61bda28b3", "address": "fa:16:3e:97:2d:58", "network": {"id": "8dd4b51a-0332-4c6a-bf34-763bf5859802", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-732228050-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e4ca4d6fef784169a26cf27307168456", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d07ffa5-4c", "ovs_interfaceid": "9d07ffa5-4cf0-46b9-baf5-3ae61bda28b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1215.604257] env[69784]: DEBUG oslo_concurrency.lockutils [req-bc2d7460-2c67-4010-89da-d5057bef4d7d req-1b771119-7694-4ed6-bb3c-2d9a3e284d0f service nova] Releasing lock "refresh_cache-e6d05e25-386e-43d1-aec4-d62b9476891d" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1215.841241] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1215.841597] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1216.000426] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1216.000849] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1216.001310] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 
tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1216.840411] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1217.242979] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Acquiring lock "697cd7aa-d710-4e46-b241-085961a8631d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1217.243222] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Lock "697cd7aa-d710-4e46-b241-085961a8631d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1217.803329] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Acquiring lock "161991fb-77d5-4a18-b0f3-d2346c8d3b68" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1217.803583] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Lock "161991fb-77d5-4a18-b0f3-d2346c8d3b68" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1217.839301] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1217.839457] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Cleaning up deleted instances with incomplete migration {{(pid=69784) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11236}} [ 1218.850155] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1218.850450] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Cleaning up deleted instances 
{{(pid=69784) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11198}} [ 1218.867555] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] There are 1 instances to clean {{(pid=69784) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11207}} [ 1218.867845] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 82981529-56c3-43c0-8d33-c2f2b0875bfc] Instance has had 0 of 5 cleanup attempts {{(pid=69784) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11211}} [ 1219.890099] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1219.890436] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1219.890490] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Starting heal instance info cache {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1219.890644] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Rebuilding the list of instances to heal {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1219.911249] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1219.911424] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1219.911551] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1219.911679] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1219.911879] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1219.912046] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Skipping network cache update for instance because it is Building. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1219.912178] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1219.912299] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1219.912418] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1219.912534] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1219.912654] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Didn't find any instances for network info cache update. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1220.840602] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1220.851915] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1220.852149] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1220.852316] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1220.852469] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69784) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1220.853619] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29b9e6fd-b11d-4a20-a214-49b80fed43a5 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.863840] 
env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23d8ebf6-d826-4699-b12f-5dd14696cb85 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.877490] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a08f0d8-a807-4990-af44-955553a19331 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.883342] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62ee57d6-209c-49da-ba62-7b2bac7e0e47 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.912014] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180898MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=69784) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1220.912274] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1220.912351] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1221.039324] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 9369b20b-7027-47de-8495-a503ddfb69bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1221.039324] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ecec531e-41d9-47e3-b447-bc658edaea69 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1221.039459] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c68ee659-716d-47cc-a6a1-d4c18fa5664f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1221.039459] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 16edc743-b24c-4a20-9046-f5d519bd7e9a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1221.039576] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c23e7041-ca02-4047-84d5-84b62f36b37f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1221.039697] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 7a640743-734e-4dc0-a965-0a71dddfb918 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1221.039814] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 288af650-a19b-4ce5-baea-013dcaa6e908 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1221.039929] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 6109a6f5-11ea-4983-b271-f84aa859d6cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1221.040053] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 694e2a62-5f2e-475d-9356-a66651c3e5e2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1221.040169] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance e6d05e25-386e-43d1-aec4-d62b9476891d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1221.052056] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 26793ea1-2934-4b30-8f8c-6beefe7046f7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1221.063062] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance f3520c81-2ace-4113-8812-11334cc2f509 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1221.072601] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 13fd6e03-4f8d-4e3c-a063-fd27362852e0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1221.081963] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 59b297a2-1b8e-49e5-8c7d-2e585d109f94 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1221.091031] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 20518e8e-51f0-4d53-827f-f0c1a57b3bc4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1221.100817] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance cb507720-ad76-406c-b4f1-4d08fda1804d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1221.110434] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 91546cc5-6e8b-4175-b256-ba19e98c22cc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1221.119696] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 9b208c32-b233-4312-902a-3e4be0ddb23b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1221.129164] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 2d4c834f-d9da-497f-8863-28c30e11c113 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1221.139079] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance d165cf46-31ba-40e3-b7e9-fad5f05242ca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1221.148571] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 8c4c0989-5269-4b6a-9b5a-778803657608 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1221.160173] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 1473585c-f194-4396-b568-e8c1bc6d049b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1221.170902] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 697cd7aa-d710-4e46-b241-085961a8631d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1221.180282] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 161991fb-77d5-4a18-b0f3-d2346c8d3b68 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1221.180520] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1221.180685] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1221.266455] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Refreshing inventories for resource provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1221.291477] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Updating ProviderTree inventory for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1221.291702] env[69784]: DEBUG nova.compute.provider_tree [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Updating inventory in ProviderTree for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1221.308603] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Refreshing aggregate associations for resource provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3, aggregates: None {{(pid=69784) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1221.333947] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Refreshing trait associations for resource provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69784) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1221.625444] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6919308-6ddf-4705-beea-3c7683cbc744 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.632898] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-8222ce90-c855-4c8b-b956-c136a3317713 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.661573] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ace925a8-c55f-4e7b-98b7-c702786f3e45 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.668475] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61b00364-55d9-4da5-acd1-8ca864b4fc62 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.681499] env[69784]: DEBUG nova.compute.provider_tree [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1221.689716] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1221.703126] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69784) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1221.703330] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.791s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1223.703660] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1224.835735] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1255.726140] env[69784]: WARNING oslo_vmware.rw_handles [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1255.726140] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1255.726140] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1255.726140] env[69784]: ERROR 
oslo_vmware.rw_handles self._conn.getresponse() [ 1255.726140] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1255.726140] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 1255.726140] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1255.726140] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1255.726140] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1255.726140] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1255.726140] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1255.726140] env[69784]: ERROR oslo_vmware.rw_handles [ 1255.726755] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/d39445f8-2824-4efa-b0cf-ecfb2f069202/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1255.728558] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1255.728797] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Copying Virtual Disk [datastore1] vmware_temp/d39445f8-2824-4efa-b0cf-ecfb2f069202/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] vmware_temp/d39445f8-2824-4efa-b0cf-ecfb2f069202/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1255.729084] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-794ac854-bd70-42bf-8665-bc338f546185 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.736623] env[69784]: DEBUG oslo_vmware.api [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Waiting for the task: (returnval){ [ 1255.736623] env[69784]: value = "task-3467111" [ 1255.736623] env[69784]: _type = "Task" [ 1255.736623] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.744669] env[69784]: DEBUG oslo_vmware.api [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Task: {'id': task-3467111, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.247879] env[69784]: DEBUG oslo_vmware.exceptions [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Fault InvalidArgument not matched. {{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1256.248186] env[69784]: DEBUG oslo_concurrency.lockutils [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1256.248738] env[69784]: ERROR nova.compute.manager [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1256.248738] env[69784]: Faults: ['InvalidArgument'] [ 1256.248738] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Traceback (most recent call last): [ 1256.248738] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1256.248738] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] yield resources [ 1256.248738] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1256.248738] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] self.driver.spawn(context, instance, image_meta, [ 1256.248738] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1256.248738] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1256.248738] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1256.248738] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] self._fetch_image_if_missing(context, vi) [ 1256.248738] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1256.249091] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] image_cache(vi, tmp_image_ds_loc) [ 1256.249091] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1256.249091] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] vm_util.copy_virtual_disk( [ 1256.249091] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 
1423, in copy_virtual_disk [ 1256.249091] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] session._wait_for_task(vmdk_copy_task) [ 1256.249091] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1256.249091] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] return self.wait_for_task(task_ref) [ 1256.249091] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1256.249091] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] return evt.wait() [ 1256.249091] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1256.249091] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] result = hub.switch() [ 1256.249091] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1256.249091] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] return self.greenlet.switch() [ 1256.249423] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1256.249423] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] self.f(*self.args, **self.kw) [ 1256.249423] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1256.249423] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] raise exceptions.translate_fault(task_info.error) [ 1256.249423] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1256.249423] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Faults: ['InvalidArgument'] [ 1256.249423] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] [ 1256.249423] env[69784]: INFO nova.compute.manager [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Terminating instance [ 1256.250538] env[69784]: DEBUG oslo_concurrency.lockutils [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1256.250731] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 
tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1256.250960] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b6d9d777-3672-4683-be70-c9a3e7bfc2cb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.253207] env[69784]: DEBUG oslo_concurrency.lockutils [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Acquiring lock "refresh_cache-9369b20b-7027-47de-8495-a503ddfb69bd" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1256.253362] env[69784]: DEBUG oslo_concurrency.lockutils [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Acquired lock "refresh_cache-9369b20b-7027-47de-8495-a503ddfb69bd" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1256.253526] env[69784]: DEBUG nova.network.neutron [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1256.260893] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1256.260893] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1256.261349] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4685e9c5-f6a6-4c06-a225-70c8eb26cea3 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.270024] env[69784]: DEBUG oslo_vmware.api [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Waiting for the task: (returnval){ [ 1256.270024] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52c811ee-cdf4-1e13-056b-19c3cb24b031" [ 1256.270024] env[69784]: _type = "Task" [ 1256.270024] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.277739] env[69784]: DEBUG oslo_vmware.api [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52c811ee-cdf4-1e13-056b-19c3cb24b031, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.285292] env[69784]: DEBUG nova.network.neutron [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Instance cache missing network info. {{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1256.352340] env[69784]: DEBUG nova.network.neutron [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1256.361160] env[69784]: DEBUG oslo_concurrency.lockutils [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Releasing lock "refresh_cache-9369b20b-7027-47de-8495-a503ddfb69bd" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1256.361571] env[69784]: DEBUG nova.compute.manager [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1256.361787] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1256.362829] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51dcadac-5f01-430e-aef2-26514d0c49ae {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.370143] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1256.370559] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5fbf6cd7-2f59-46f2-9f9d-1a926b089360 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.405080] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1256.405302] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1256.405482] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Deleting the datastore file [datastore1] 9369b20b-7027-47de-8495-a503ddfb69bd {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1256.405726] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1918d66c-70c4-4de3-afb3-4f75bccdfb19 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.411858] env[69784]: DEBUG oslo_vmware.api [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Waiting for the task: (returnval){ [ 1256.411858] env[69784]: value = "task-3467113" [ 1256.411858] env[69784]: _type = "Task" [ 1256.411858] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.419484] env[69784]: DEBUG oslo_vmware.api [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Task: {'id': task-3467113, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.780293] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1256.780562] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Creating directory with path [datastore1] vmware_temp/46537da4-825d-4ef6-886f-afaef1976600/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1256.780782] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-40770247-6b92-415f-a201-1e5bb0bc4842 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.792126] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Created directory with path [datastore1] vmware_temp/46537da4-825d-4ef6-886f-afaef1976600/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1256.792326] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Fetch image to [datastore1] vmware_temp/46537da4-825d-4ef6-886f-afaef1976600/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1256.792495] env[69784]: DEBUG 
nova.virt.vmwareapi.vmops [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/46537da4-825d-4ef6-886f-afaef1976600/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1256.793282] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f26581d4-53aa-4c81-af8e-f810bf49d710 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.800475] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b6972b0-2a71-477e-9ff8-4053d6d0604d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.809207] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7a18be4-bf33-4b13-b93d-1174b610e954 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.841248] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f19c3e2-712a-44cc-b738-ba7cf8c4db5b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.846683] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-34952a8a-7a7d-4ba8-b89e-43165d62196f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.866833] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1256.922031] env[69784]: DEBUG oslo_vmware.api [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Task: {'id': task-3467113, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.033901} completed successfully. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.922187] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1256.922276] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1256.922457] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1256.922587] env[69784]: INFO nova.compute.manager [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Took 0.56 seconds to destroy the instance on the hypervisor. [ 1256.922824] env[69784]: DEBUG oslo.service.loopingcall [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1256.923036] env[69784]: DEBUG nova.compute.manager [-] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Skipping network deallocation for instance since networking was not requested. 
{{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2259}} [ 1256.925139] env[69784]: DEBUG nova.compute.claims [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1256.925309] env[69784]: DEBUG oslo_concurrency.lockutils [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1256.925516] env[69784]: DEBUG oslo_concurrency.lockutils [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1256.932689] env[69784]: DEBUG oslo_vmware.rw_handles [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/46537da4-825d-4ef6-886f-afaef1976600/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1256.994975] env[69784]: DEBUG oslo_vmware.rw_handles [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Completed reading data from the image iterator. {{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1256.995137] env[69784]: DEBUG oslo_vmware.rw_handles [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/46537da4-825d-4ef6-886f-afaef1976600/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1257.293761] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4decbb46-39ab-4e39-951e-6f820c20e0eb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.301105] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ee26a2-ecde-4190-a3b8-47e99c8ae323 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.331545] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b6c7ab0-9ede-484b-aa0a-5dd9b50e6d70 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.338825] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc61cb24-2c02-46ab-bb06-03acd235dcb7 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.351704] env[69784]: DEBUG nova.compute.provider_tree [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1257.361660] env[69784]: DEBUG nova.scheduler.client.report [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1257.380706] env[69784]: DEBUG oslo_concurrency.lockutils [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.455s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1257.381282] env[69784]: ERROR nova.compute.manager [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1257.381282] env[69784]: Faults: ['InvalidArgument'] [ 1257.381282] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Traceback (most recent call last): [ 1257.381282] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1257.381282] env[69784]: ERROR nova.compute.manager 
[instance: 9369b20b-7027-47de-8495-a503ddfb69bd] self.driver.spawn(context, instance, image_meta, [ 1257.381282] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1257.381282] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1257.381282] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1257.381282] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] self._fetch_image_if_missing(context, vi) [ 1257.381282] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1257.381282] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] image_cache(vi, tmp_image_ds_loc) [ 1257.381282] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1257.381868] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] vm_util.copy_virtual_disk( [ 1257.381868] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1257.381868] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] session._wait_for_task(vmdk_copy_task) [ 1257.381868] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1257.381868] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] return self.wait_for_task(task_ref) [ 1257.381868] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1257.381868] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] return evt.wait() [ 1257.381868] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1257.381868] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] result = hub.switch() [ 1257.381868] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1257.381868] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] return self.greenlet.switch() [ 1257.381868] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1257.381868] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] self.f(*self.args, **self.kw) [ 1257.382505] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1257.382505] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] raise exceptions.translate_fault(task_info.error) [ 1257.382505] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1257.382505] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Faults: ['InvalidArgument'] [ 1257.382505] env[69784]: ERROR nova.compute.manager [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] [ 1257.382505] env[69784]: DEBUG nova.compute.utils [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1257.383461] env[69784]: DEBUG nova.compute.manager [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Build of instance 9369b20b-7027-47de-8495-a503ddfb69bd was re-scheduled: A specified parameter was not correct: fileType [ 1257.383461] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1257.383922] env[69784]: DEBUG nova.compute.manager [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1257.384181] env[69784]: DEBUG oslo_concurrency.lockutils [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Acquiring lock "refresh_cache-9369b20b-7027-47de-8495-a503ddfb69bd" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1257.384348] env[69784]: DEBUG oslo_concurrency.lockutils [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Acquired lock "refresh_cache-9369b20b-7027-47de-8495-a503ddfb69bd" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1257.384513] env[69784]: DEBUG nova.network.neutron [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1257.406829] env[69784]: DEBUG nova.network.neutron [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Instance cache missing network info. 
{{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1257.467812] env[69784]: DEBUG nova.network.neutron [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1257.475851] env[69784]: DEBUG oslo_concurrency.lockutils [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Releasing lock "refresh_cache-9369b20b-7027-47de-8495-a503ddfb69bd" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1257.476140] env[69784]: DEBUG nova.compute.manager [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1257.476378] env[69784]: DEBUG nova.compute.manager [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Skipping network deallocation for instance since networking was not requested. {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2259}} [ 1257.576691] env[69784]: INFO nova.scheduler.client.report [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Deleted allocations for instance 9369b20b-7027-47de-8495-a503ddfb69bd [ 1257.616988] env[69784]: DEBUG oslo_concurrency.lockutils [None req-dd3d0b2f-6347-47db-89fe-c1db5100ed36 tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Lock "9369b20b-7027-47de-8495-a503ddfb69bd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 573.245s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1257.618224] env[69784]: DEBUG oslo_concurrency.lockutils [None req-723d9a85-5d3f-49a9-9944-843e14ab3a1b tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Lock "9369b20b-7027-47de-8495-a503ddfb69bd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 370.795s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1257.618822] env[69784]: DEBUG oslo_concurrency.lockutils [None req-723d9a85-5d3f-49a9-9944-843e14ab3a1b tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Acquiring lock "9369b20b-7027-47de-8495-a503ddfb69bd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1257.618822] env[69784]: DEBUG oslo_concurrency.lockutils [None req-723d9a85-5d3f-49a9-9944-843e14ab3a1b tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Lock "9369b20b-7027-47de-8495-a503ddfb69bd-events" acquired 
by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1257.618822] env[69784]: DEBUG oslo_concurrency.lockutils [None req-723d9a85-5d3f-49a9-9944-843e14ab3a1b tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Lock "9369b20b-7027-47de-8495-a503ddfb69bd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1257.621240] env[69784]: INFO nova.compute.manager [None req-723d9a85-5d3f-49a9-9944-843e14ab3a1b tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Terminating instance [ 1257.625637] env[69784]: DEBUG oslo_concurrency.lockutils [None req-723d9a85-5d3f-49a9-9944-843e14ab3a1b tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Acquiring lock "refresh_cache-9369b20b-7027-47de-8495-a503ddfb69bd" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1257.625835] env[69784]: DEBUG oslo_concurrency.lockutils [None req-723d9a85-5d3f-49a9-9944-843e14ab3a1b tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Acquired lock "refresh_cache-9369b20b-7027-47de-8495-a503ddfb69bd" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1257.626068] env[69784]: DEBUG nova.network.neutron [None req-723d9a85-5d3f-49a9-9944-843e14ab3a1b tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1257.648462] env[69784]: DEBUG nova.compute.manager [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1257.652758] env[69784]: DEBUG nova.network.neutron [None req-723d9a85-5d3f-49a9-9944-843e14ab3a1b tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Instance cache missing network info. 
{{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1257.717440] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1257.717688] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1257.719203] env[69784]: INFO nova.compute.claims [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1257.757874] env[69784]: DEBUG nova.network.neutron [None req-723d9a85-5d3f-49a9-9944-843e14ab3a1b tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1257.766844] env[69784]: DEBUG oslo_concurrency.lockutils [None req-723d9a85-5d3f-49a9-9944-843e14ab3a1b tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Releasing lock "refresh_cache-9369b20b-7027-47de-8495-a503ddfb69bd" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1257.766844] env[69784]: DEBUG nova.compute.manager [None req-723d9a85-5d3f-49a9-9944-843e14ab3a1b tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Start destroying the instance on the hypervisor. 
{{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1257.767045] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-723d9a85-5d3f-49a9-9944-843e14ab3a1b tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1257.767716] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d24b81fa-a92d-4ba3-b8bb-d2ca7a116720 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.777908] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d24ec6a3-510b-44c4-8ba9-377f892eb264 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.810185] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-723d9a85-5d3f-49a9-9944-843e14ab3a1b tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9369b20b-7027-47de-8495-a503ddfb69bd could not be found. [ 1257.810185] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-723d9a85-5d3f-49a9-9944-843e14ab3a1b tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1257.810185] env[69784]: INFO nova.compute.manager [None req-723d9a85-5d3f-49a9-9944-843e14ab3a1b tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1257.810185] env[69784]: DEBUG oslo.service.loopingcall [None req-723d9a85-5d3f-49a9-9944-843e14ab3a1b tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1257.812158] env[69784]: DEBUG nova.compute.manager [-] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1257.812264] env[69784]: DEBUG nova.network.neutron [-] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1257.836391] env[69784]: DEBUG nova.network.neutron [-] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Instance cache missing network info. {{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1257.846265] env[69784]: DEBUG nova.network.neutron [-] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1257.858489] env[69784]: INFO nova.compute.manager [-] [instance: 9369b20b-7027-47de-8495-a503ddfb69bd] Took 0.05 seconds to deallocate network for instance. 
[ 1257.972772] env[69784]: DEBUG oslo_concurrency.lockutils [None req-723d9a85-5d3f-49a9-9944-843e14ab3a1b tempest-ServersAdmin275Test-964351580 tempest-ServersAdmin275Test-964351580-project-member] Lock "9369b20b-7027-47de-8495-a503ddfb69bd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.354s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1258.047212] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5bf70a4-9033-437a-bb34-0ce81aa12814 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.054690] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52a4d062-f4cc-481d-b545-c0d140cc3e61 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.083610] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3898eea3-47f2-4ef5-8ac4-9cc43c6a4327 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.090390] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-833aeaa7-9111-41b9-a82d-b81c81af3a7f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.103489] env[69784]: DEBUG nova.compute.provider_tree [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1258.111186] env[69784]: DEBUG nova.scheduler.client.report [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1258.128010] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.410s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1258.147808] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Acquiring lock "8373910a-0023-4853-b1ba-c09b56c3ce01" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 
1258.148040] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Lock "8373910a-0023-4853-b1ba-c09b56c3ce01" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1258.153016] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Lock "8373910a-0023-4853-b1ba-c09b56c3ce01" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.005s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1258.153469] env[69784]: DEBUG nova.compute.manager [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Start building networks asynchronously for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1258.193813] env[69784]: DEBUG nova.compute.utils [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1258.194978] env[69784]: DEBUG nova.compute.manager [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1258.195157] env[69784]: DEBUG nova.network.neutron [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1258.206843] env[69784]: DEBUG nova.compute.manager [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Start building block device mappings for instance. 
{{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1258.272878] env[69784]: DEBUG nova.policy [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '45fa2e15fc074d87859c80871f8c1dd6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f8442d798587471786c22c4d3e79bb9b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 1258.277784] env[69784]: DEBUG nova.compute.manager [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Start spawning the instance on the hypervisor. {{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1258.316019] env[69784]: DEBUG nova.virt.hardware [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1258.316299] env[69784]: DEBUG nova.virt.hardware [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1258.316457] env[69784]: DEBUG nova.virt.hardware [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1258.316683] env[69784]: DEBUG nova.virt.hardware [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1258.316805] env[69784]: DEBUG nova.virt.hardware [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1258.316945] env[69784]: DEBUG nova.virt.hardware [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 
tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1258.317215] env[69784]: DEBUG nova.virt.hardware [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1258.317356] env[69784]: DEBUG nova.virt.hardware [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1258.317521] env[69784]: DEBUG nova.virt.hardware [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1258.317683] env[69784]: DEBUG nova.virt.hardware [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1258.317855] env[69784]: DEBUG nova.virt.hardware [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1258.318709] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b73be0d-4d02-425b-88d9-2a438162a86c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.326474] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05d815fa-954c-4651-8179-512680188be6 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.800750] env[69784]: DEBUG nova.network.neutron [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Successfully created port: cfd59ee9-c517-4702-b7ae-448c204343f6 {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1259.730988] env[69784]: DEBUG nova.compute.manager [req-aa2facf4-bfcf-480f-b030-1eb3c79aed32 req-168ac2ec-3ad8-4522-96b6-f6d43f643aea service nova] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Received event network-vif-plugged-cfd59ee9-c517-4702-b7ae-448c204343f6 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1259.731236] env[69784]: DEBUG oslo_concurrency.lockutils [req-aa2facf4-bfcf-480f-b030-1eb3c79aed32 req-168ac2ec-3ad8-4522-96b6-f6d43f643aea service nova] Acquiring lock "26793ea1-2934-4b30-8f8c-6beefe7046f7-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1259.731447] env[69784]: DEBUG oslo_concurrency.lockutils [req-aa2facf4-bfcf-480f-b030-1eb3c79aed32 req-168ac2ec-3ad8-4522-96b6-f6d43f643aea service nova] Lock "26793ea1-2934-4b30-8f8c-6beefe7046f7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1259.731612] env[69784]: DEBUG oslo_concurrency.lockutils [req-aa2facf4-bfcf-480f-b030-1eb3c79aed32 req-168ac2ec-3ad8-4522-96b6-f6d43f643aea service nova] Lock "26793ea1-2934-4b30-8f8c-6beefe7046f7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1259.731794] env[69784]: DEBUG nova.compute.manager [req-aa2facf4-bfcf-480f-b030-1eb3c79aed32 req-168ac2ec-3ad8-4522-96b6-f6d43f643aea service nova] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] No waiting events found dispatching network-vif-plugged-cfd59ee9-c517-4702-b7ae-448c204343f6 {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1259.731987] env[69784]: WARNING nova.compute.manager [req-aa2facf4-bfcf-480f-b030-1eb3c79aed32 req-168ac2ec-3ad8-4522-96b6-f6d43f643aea service nova] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Received unexpected event network-vif-plugged-cfd59ee9-c517-4702-b7ae-448c204343f6 for instance with vm_state building and task_state spawning. [ 1259.839737] env[69784]: DEBUG nova.network.neutron [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Successfully updated port: cfd59ee9-c517-4702-b7ae-448c204343f6 {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1259.857452] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Acquiring lock "refresh_cache-26793ea1-2934-4b30-8f8c-6beefe7046f7" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1259.857452] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Acquired lock "refresh_cache-26793ea1-2934-4b30-8f8c-6beefe7046f7" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1259.857452] env[69784]: DEBUG nova.network.neutron [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1259.893808] env[69784]: DEBUG nova.network.neutron [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Instance cache missing network info. 
{{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1260.103543] env[69784]: DEBUG nova.network.neutron [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Updating instance_info_cache with network_info: [{"id": "cfd59ee9-c517-4702-b7ae-448c204343f6", "address": "fa:16:3e:20:09:56", "network": {"id": "705defb9-39ff-44a5-bb7c-7497815f648d", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1316073970-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8442d798587471786c22c4d3e79bb9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcfd59ee9-c5", "ovs_interfaceid": "cfd59ee9-c517-4702-b7ae-448c204343f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1260.119120] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Releasing lock "refresh_cache-26793ea1-2934-4b30-8f8c-6beefe7046f7" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1260.119440] env[69784]: DEBUG nova.compute.manager [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Instance network_info: |[{"id": "cfd59ee9-c517-4702-b7ae-448c204343f6", "address": "fa:16:3e:20:09:56", "network": {"id": "705defb9-39ff-44a5-bb7c-7497815f648d", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1316073970-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8442d798587471786c22c4d3e79bb9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcfd59ee9-c5", "ovs_interfaceid": "cfd59ee9-c517-4702-b7ae-448c204343f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} 
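The vmops entry that follows turns this cached Neutron port into an "Instance VIF info" structure. A simplified illustration of that mapping, using values copied from the cache entry above (the helper below is a hypothetical stand-in; Nova's real translation lives in the vmwareapi driver and handles more cases):

# Abbreviated copy of the single port in the instance_info_cache above.
port = {
    "id": "cfd59ee9-c517-4702-b7ae-448c204343f6",
    "address": "fa:16:3e:20:09:56",
    "network": {"bridge": "br-int"},
    "details": {"nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d"},
    "vnic_type": "normal",
}


def vif_info_from_port(port, vif_model="vmxnet3"):
    # Build a VIF-info dict shaped like the one in the vmops entry below:
    # the NSX logical switch id becomes an OpaqueNetwork reference and the
    # Neutron port id becomes the iface_id.
    return {
        "network_name": port["network"]["bridge"],
        "mac_address": port["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": port["details"]["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": port["id"],
        "vif_model": vif_model,
    }


if __name__ == "__main__":
    print(vif_info_from_port(port))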
[ 1260.119831] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:20:09:56', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ddfb706a-add1-4e16-9ac4-d20b16a1df6d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cfd59ee9-c517-4702-b7ae-448c204343f6', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1260.128547] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Creating folder: Project (f8442d798587471786c22c4d3e79bb9b). Parent ref: group-v692547. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1260.130042] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ee18a822-72ed-44c9-961b-0aa94c5f5040 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.139634] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Created folder: Project (f8442d798587471786c22c4d3e79bb9b) in parent group-v692547. [ 1260.139831] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Creating folder: Instances. Parent ref: group-v692611. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1260.140078] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-655c2844-8bbe-4250-815c-ce03d8b976d3 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.148688] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Created folder: Instances in parent group-v692611. [ 1260.148915] env[69784]: DEBUG oslo.service.loopingcall [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1260.149103] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1260.149298] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8c864c36-d1e8-4376-bf22-0c5b69896743 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.168139] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1260.168139] env[69784]: value = "task-3467116" [ 1260.168139] env[69784]: _type = "Task" [ 1260.168139] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.175246] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467116, 'name': CreateVM_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.677687] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467116, 'name': CreateVM_Task, 'duration_secs': 0.310185} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1260.677854] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1260.678539] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1260.678704] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1260.679063] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1260.679326] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbe3da54-920f-4f60-84a2-893dc5c6c271 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.683947] env[69784]: DEBUG oslo_vmware.api [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Waiting for the task: (returnval){ [ 1260.683947] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]528c8184-8d15-bdcb-251f-403ad6cff097" [ 1260.683947] env[69784]: _type = "Task" [ 1260.683947] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.692628] env[69784]: DEBUG oslo_vmware.api [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]528c8184-8d15-bdcb-251f-403ad6cff097, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.194300] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1261.194668] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1261.194789] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1261.754590] env[69784]: DEBUG nova.compute.manager [req-18e47604-f0db-4c5c-9d1b-d88ef19ac1b0 req-0c11e897-5c81-4ed0-a0df-5ec02e3f9430 service nova] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Received event network-changed-cfd59ee9-c517-4702-b7ae-448c204343f6 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1261.754802] env[69784]: DEBUG nova.compute.manager [req-18e47604-f0db-4c5c-9d1b-d88ef19ac1b0 req-0c11e897-5c81-4ed0-a0df-5ec02e3f9430 service nova] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Refreshing instance network info cache due to event network-changed-cfd59ee9-c517-4702-b7ae-448c204343f6. {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1261.755031] env[69784]: DEBUG oslo_concurrency.lockutils [req-18e47604-f0db-4c5c-9d1b-d88ef19ac1b0 req-0c11e897-5c81-4ed0-a0df-5ec02e3f9430 service nova] Acquiring lock "refresh_cache-26793ea1-2934-4b30-8f8c-6beefe7046f7" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1261.755185] env[69784]: DEBUG oslo_concurrency.lockutils [req-18e47604-f0db-4c5c-9d1b-d88ef19ac1b0 req-0c11e897-5c81-4ed0-a0df-5ec02e3f9430 service nova] Acquired lock "refresh_cache-26793ea1-2934-4b30-8f8c-6beefe7046f7" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1261.755352] env[69784]: DEBUG nova.network.neutron [req-18e47604-f0db-4c5c-9d1b-d88ef19ac1b0 req-0c11e897-5c81-4ed0-a0df-5ec02e3f9430 service nova] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Refreshing network info cache for port cfd59ee9-c517-4702-b7ae-448c204343f6 {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1262.144611] env[69784]: DEBUG nova.network.neutron [req-18e47604-f0db-4c5c-9d1b-d88ef19ac1b0 req-0c11e897-5c81-4ed0-a0df-5ec02e3f9430 service nova] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Updated VIF entry in instance network info cache for port cfd59ee9-c517-4702-b7ae-448c204343f6. 
{{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1262.144978] env[69784]: DEBUG nova.network.neutron [req-18e47604-f0db-4c5c-9d1b-d88ef19ac1b0 req-0c11e897-5c81-4ed0-a0df-5ec02e3f9430 service nova] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Updating instance_info_cache with network_info: [{"id": "cfd59ee9-c517-4702-b7ae-448c204343f6", "address": "fa:16:3e:20:09:56", "network": {"id": "705defb9-39ff-44a5-bb7c-7497815f648d", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1316073970-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8442d798587471786c22c4d3e79bb9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcfd59ee9-c5", "ovs_interfaceid": "cfd59ee9-c517-4702-b7ae-448c204343f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1262.154303] env[69784]: DEBUG oslo_concurrency.lockutils [req-18e47604-f0db-4c5c-9d1b-d88ef19ac1b0 req-0c11e897-5c81-4ed0-a0df-5ec02e3f9430 service nova] Releasing lock "refresh_cache-26793ea1-2934-4b30-8f8c-6beefe7046f7" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1266.268278] env[69784]: DEBUG oslo_concurrency.lockutils [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Acquiring lock "6cdbefb3-90e0-4ea5-b41f-a094723dbdbe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1266.268565] env[69784]: DEBUG oslo_concurrency.lockutils [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Lock "6cdbefb3-90e0-4ea5-b41f-a094723dbdbe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1266.423347] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d9758d6e-bb0e-4e07-91bd-04c5c71d2340 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Acquiring lock "96ccaa91-fd19-4793-9583-afb4d5708cd1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1266.423580] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d9758d6e-bb0e-4e07-91bd-04c5c71d2340 tempest-ListImageFiltersTestJSON-2144466809 
tempest-ListImageFiltersTestJSON-2144466809-project-member] Lock "96ccaa91-fd19-4793-9583-afb4d5708cd1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1266.764453] env[69784]: DEBUG oslo_concurrency.lockutils [None req-163f49f9-e0d6-42a8-bf04-07264cead378 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Acquiring lock "e6d05e25-386e-43d1-aec4-d62b9476891d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1272.846501] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._sync_power_states {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1272.868979] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Getting list of instances from cluster (obj){ [ 1272.868979] env[69784]: value = "domain-c8" [ 1272.868979] env[69784]: _type = "ClusterComputeResource" [ 1272.868979] env[69784]: } {{(pid=69784) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1272.870541] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00a5c884-92e5-4627-ba2f-450b77fb5168 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.889212] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Got total of 10 instances {{(pid=69784) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1272.889438] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Triggering sync for uuid ecec531e-41d9-47e3-b447-bc658edaea69 {{(pid=69784) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1272.889679] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Triggering sync for uuid c68ee659-716d-47cc-a6a1-d4c18fa5664f {{(pid=69784) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1272.889895] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Triggering sync for uuid 16edc743-b24c-4a20-9046-f5d519bd7e9a {{(pid=69784) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1272.890088] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Triggering sync for uuid c23e7041-ca02-4047-84d5-84b62f36b37f {{(pid=69784) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1272.890254] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Triggering sync for uuid 7a640743-734e-4dc0-a965-0a71dddfb918 {{(pid=69784) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1272.890414] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Triggering sync for uuid 288af650-a19b-4ce5-baea-013dcaa6e908 {{(pid=69784) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1272.890566] env[69784]: DEBUG 
nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Triggering sync for uuid 6109a6f5-11ea-4983-b271-f84aa859d6cd {{(pid=69784) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1272.890713] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Triggering sync for uuid 694e2a62-5f2e-475d-9356-a66651c3e5e2 {{(pid=69784) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1272.890861] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Triggering sync for uuid e6d05e25-386e-43d1-aec4-d62b9476891d {{(pid=69784) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1272.891015] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Triggering sync for uuid 26793ea1-2934-4b30-8f8c-6beefe7046f7 {{(pid=69784) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1272.891353] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "ecec531e-41d9-47e3-b447-bc658edaea69" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1272.891588] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "c68ee659-716d-47cc-a6a1-d4c18fa5664f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1272.891825] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "16edc743-b24c-4a20-9046-f5d519bd7e9a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1272.892097] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "c23e7041-ca02-4047-84d5-84b62f36b37f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1272.892439] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "7a640743-734e-4dc0-a965-0a71dddfb918" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1272.892544] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "288af650-a19b-4ce5-baea-013dcaa6e908" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1272.892737] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "6109a6f5-11ea-4983-b271-f84aa859d6cd" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69784) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1272.892929] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "694e2a62-5f2e-475d-9356-a66651c3e5e2" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1272.893142] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "e6d05e25-386e-43d1-aec4-d62b9476891d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1272.893341] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "26793ea1-2934-4b30-8f8c-6beefe7046f7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1274.888112] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1275.839571] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1275.839833] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1275.839979] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69784) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1276.841033] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1276.841303] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1280.835801] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1280.839425] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1280.839587] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Starting heal instance info cache {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1280.839706] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Rebuilding the list of instances to heal {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1280.861101] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1280.861254] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1280.861399] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1280.861525] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1280.861647] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Skipping network cache update for instance because it is Building. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1280.861768] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1280.861889] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1280.862018] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1280.862288] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1280.862425] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1280.862546] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Didn't find any instances for network info cache update. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1281.839545] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1281.850323] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1281.850552] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1281.850722] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1281.850879] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69784) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1281.851986] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b37c82-b9c3-43e4-b007-fe0ccda9de18 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.860925] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f62e2e09-2fb3-47c5-aa74-5d30bc22f273 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.875124] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf905bc6-fd39-4cbb-bbfc-3ada277a9cfa {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.881259] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6615ba08-2e93-4800-ba07-a0b530e6fbaa {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.909350] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180914MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=69784) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1281.909497] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 
1281.909690] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1281.981227] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ecec531e-41d9-47e3-b447-bc658edaea69 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1281.981396] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c68ee659-716d-47cc-a6a1-d4c18fa5664f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1281.981526] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 16edc743-b24c-4a20-9046-f5d519bd7e9a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1281.981650] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c23e7041-ca02-4047-84d5-84b62f36b37f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1281.981770] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 7a640743-734e-4dc0-a965-0a71dddfb918 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1281.981888] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 288af650-a19b-4ce5-baea-013dcaa6e908 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1281.982015] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 6109a6f5-11ea-4983-b271-f84aa859d6cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1281.982143] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 694e2a62-5f2e-475d-9356-a66651c3e5e2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1281.982282] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance e6d05e25-386e-43d1-aec4-d62b9476891d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1281.982421] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 26793ea1-2934-4b30-8f8c-6beefe7046f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1281.996822] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance f3520c81-2ace-4113-8812-11334cc2f509 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1282.006738] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 13fd6e03-4f8d-4e3c-a063-fd27362852e0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1282.017145] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 59b297a2-1b8e-49e5-8c7d-2e585d109f94 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1282.026637] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 20518e8e-51f0-4d53-827f-f0c1a57b3bc4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1282.036406] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance cb507720-ad76-406c-b4f1-4d08fda1804d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1282.045576] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 91546cc5-6e8b-4175-b256-ba19e98c22cc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1282.055184] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 9b208c32-b233-4312-902a-3e4be0ddb23b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1282.064144] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 2d4c834f-d9da-497f-8863-28c30e11c113 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1282.074889] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance d165cf46-31ba-40e3-b7e9-fad5f05242ca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1282.083763] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 8c4c0989-5269-4b6a-9b5a-778803657608 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1282.095941] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 1473585c-f194-4396-b568-e8c1bc6d049b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1282.105581] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 697cd7aa-d710-4e46-b241-085961a8631d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1282.115198] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 161991fb-77d5-4a18-b0f3-d2346c8d3b68 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1282.124533] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1282.133405] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 96ccaa91-fd19-4793-9583-afb4d5708cd1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1282.133638] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1282.133785] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1282.411282] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4f5a9d0-d969-4f03-860e-fff817273ce5 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.419446] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9702e0fe-de6f-4947-9306-14fcc850151b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.447889] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4ea3fd7-5217-47a8-a908-117ac2a28400 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.454961] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22a3db72-107e-46c6-8e87-b694b3c5911f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.468013] env[69784]: DEBUG nova.compute.provider_tree [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed in ProviderTree for provider: 
dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1282.478255] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1282.493128] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69784) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1282.493318] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.584s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1285.493643] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1286.388926] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b2b7726b-5e87-41e6-a4be-2893d3822dde tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Acquiring lock "26793ea1-2934-4b30-8f8c-6beefe7046f7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1294.471468] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e24dea2d-74c9-4f93-a134-b459b6b8bcb3 tempest-ImagesTestJSON-82243885 tempest-ImagesTestJSON-82243885-project-member] Acquiring lock "3d3b940f-5376-4300-8b69-6cf40b0e2e31" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1294.471784] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e24dea2d-74c9-4f93-a134-b459b6b8bcb3 tempest-ImagesTestJSON-82243885 tempest-ImagesTestJSON-82243885-project-member] Lock "3d3b940f-5376-4300-8b69-6cf40b0e2e31" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1299.311269] env[69784]: DEBUG oslo_concurrency.lockutils [None req-91368744-5eb0-4443-a864-f40c9603df9f tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Acquiring lock "f04ff971-fae1-453a-a131-308618f24020" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1299.311876] env[69784]: DEBUG oslo_concurrency.lockutils [None req-91368744-5eb0-4443-a864-f40c9603df9f tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Lock "f04ff971-fae1-453a-a131-308618f24020" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1301.404583] env[69784]: DEBUG oslo_concurrency.lockutils [None req-bb7d4401-b86d-4454-b065-4c6e889e0127 tempest-ServerRescueTestJSON-660458893 tempest-ServerRescueTestJSON-660458893-project-member] Acquiring lock "a32a5943-a449-4b47-820c-5a7e593c6443" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1301.404906] env[69784]: DEBUG oslo_concurrency.lockutils [None req-bb7d4401-b86d-4454-b065-4c6e889e0127 tempest-ServerRescueTestJSON-660458893 tempest-ServerRescueTestJSON-660458893-project-member] Lock "a32a5943-a449-4b47-820c-5a7e593c6443" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1306.362117] env[69784]: WARNING oslo_vmware.rw_handles [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1306.362117] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1306.362117] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1306.362117] env[69784]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1306.362117] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1306.362117] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 1306.362117] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1306.362117] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1306.362117] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1306.362117] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1306.362117] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1306.362117] env[69784]: ERROR oslo_vmware.rw_handles [ 1306.362997] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/46537da4-825d-4ef6-886f-afaef1976600/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) fetch_image 
/opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1306.364859] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1306.365176] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Copying Virtual Disk [datastore1] vmware_temp/46537da4-825d-4ef6-886f-afaef1976600/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] vmware_temp/46537da4-825d-4ef6-886f-afaef1976600/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1306.365488] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4d78d30b-8e15-4d51-8445-9e1dfeb97f86 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.373294] env[69784]: DEBUG oslo_vmware.api [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Waiting for the task: (returnval){ [ 1306.373294] env[69784]: value = "task-3467117" [ 1306.373294] env[69784]: _type = "Task" [ 1306.373294] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.381039] env[69784]: DEBUG oslo_vmware.api [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Task: {'id': task-3467117, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.884699] env[69784]: DEBUG oslo_vmware.exceptions [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Fault InvalidArgument not matched. 
{{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1306.884984] env[69784]: DEBUG oslo_concurrency.lockutils [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1306.885567] env[69784]: ERROR nova.compute.manager [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1306.885567] env[69784]: Faults: ['InvalidArgument'] [ 1306.885567] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Traceback (most recent call last): [ 1306.885567] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1306.885567] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] yield resources [ 1306.885567] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1306.885567] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] self.driver.spawn(context, instance, image_meta, [ 1306.885567] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1306.885567] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1306.885567] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1306.885567] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] self._fetch_image_if_missing(context, vi) [ 1306.885567] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1306.885975] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] image_cache(vi, tmp_image_ds_loc) [ 1306.885975] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1306.885975] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] vm_util.copy_virtual_disk( [ 1306.885975] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1306.885975] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] session._wait_for_task(vmdk_copy_task) [ 1306.885975] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] 
File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1306.885975] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] return self.wait_for_task(task_ref) [ 1306.885975] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1306.885975] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] return evt.wait() [ 1306.885975] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1306.885975] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] result = hub.switch() [ 1306.885975] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1306.885975] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] return self.greenlet.switch() [ 1306.886543] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1306.886543] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] self.f(*self.args, **self.kw) [ 1306.886543] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1306.886543] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] raise exceptions.translate_fault(task_info.error) [ 1306.886543] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1306.886543] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Faults: ['InvalidArgument'] [ 1306.886543] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] [ 1306.886543] env[69784]: INFO nova.compute.manager [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Terminating instance [ 1306.887452] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1306.887692] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1306.888362] env[69784]: DEBUG nova.compute.manager [None req-81c4a500-f9dc-4578-a661-b345e415a598 
tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1306.888555] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1306.888773] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fa63ecb2-4a44-4165-9492-b671fb5fcccc {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.891106] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2d96e81-745c-42a0-b07b-3d42136e51b2 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.898114] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1306.898332] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6f2049e8-02ce-4618-bc87-bacda8f57488 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.900451] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1306.900636] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1306.901845] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb422907-fe02-49e5-8013-441bb95d8aee {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.906338] env[69784]: DEBUG oslo_vmware.api [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Waiting for the task: (returnval){ [ 1306.906338] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]526c5e03-9c2f-3b3e-aefa-cff7bb964c78" [ 1306.906338] env[69784]: _type = "Task" [ 1306.906338] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.916969] env[69784]: DEBUG oslo_vmware.api [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]526c5e03-9c2f-3b3e-aefa-cff7bb964c78, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.417453] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1307.417880] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Creating directory with path [datastore1] vmware_temp/44d12b87-401b-4faf-94aa-b9e23a2340e8/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1307.418058] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8ef51a5b-b5f6-4513-9350-d9e9681e03d8 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.437328] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Created directory with path [datastore1] vmware_temp/44d12b87-401b-4faf-94aa-b9e23a2340e8/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1307.437511] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Fetch image to [datastore1] vmware_temp/44d12b87-401b-4faf-94aa-b9e23a2340e8/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1307.437676] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/44d12b87-401b-4faf-94aa-b9e23a2340e8/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1307.438401] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e51983be-5c8b-4300-9d22-41749a654d62 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.445361] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8857d8f7-126e-48bf-b32e-36f7c0517c72 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.454262] 
env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8485fd34-c992-492f-9895-51939adb2a6d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.484200] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64b2fb74-f801-4916-9aa7-55219a507c35 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.489447] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-643b74ee-93e5-4dee-9556-ffd795a52474 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.511549] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1307.679200] env[69784]: DEBUG oslo_vmware.rw_handles [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/44d12b87-401b-4faf-94aa-b9e23a2340e8/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1307.739369] env[69784]: DEBUG oslo_vmware.rw_handles [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Completed reading data from the image iterator. {{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1307.739741] env[69784]: DEBUG oslo_vmware.rw_handles [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/44d12b87-401b-4faf-94aa-b9e23a2340e8/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1308.552474] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1308.552870] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1308.552870] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Deleting the datastore file [datastore1] ecec531e-41d9-47e3-b447-bc658edaea69 {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1308.553182] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9ebe2d86-1c82-4345-9cf3-e418b2e9f20c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.559573] env[69784]: DEBUG oslo_vmware.api [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Waiting for the task: (returnval){ [ 1308.559573] env[69784]: value = "task-3467119" [ 1308.559573] env[69784]: _type = "Task" [ 1308.559573] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.567331] env[69784]: DEBUG oslo_vmware.api [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Task: {'id': task-3467119, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.070104] env[69784]: DEBUG oslo_vmware.api [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Task: {'id': task-3467119, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080426} completed successfully. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.070365] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1309.070551] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1309.070724] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1309.070897] env[69784]: INFO nova.compute.manager [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Took 2.18 seconds to destroy the instance on the hypervisor. [ 1309.073896] env[69784]: DEBUG nova.compute.claims [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1309.074080] env[69784]: DEBUG oslo_concurrency.lockutils [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1309.074329] env[69784]: DEBUG oslo_concurrency.lockutils [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1309.370342] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac47bd6f-4bd8-444c-94e9-e571eb765f1f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.379618] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa2d1a0e-2b75-4e80-bdbb-deba10355f92 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.409746] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7e5019e-4e29-4e38-b5dc-fc21b793c4a5 {{(pid=69784) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.417454] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a419d35f-8616-4a60-8d08-3dd8e291c696 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.431210] env[69784]: DEBUG nova.compute.provider_tree [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1309.441415] env[69784]: DEBUG nova.scheduler.client.report [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1309.454891] env[69784]: DEBUG oslo_concurrency.lockutils [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.380s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1309.455428] env[69784]: ERROR nova.compute.manager [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1309.455428] env[69784]: Faults: ['InvalidArgument'] [ 1309.455428] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Traceback (most recent call last): [ 1309.455428] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1309.455428] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] self.driver.spawn(context, instance, image_meta, [ 1309.455428] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1309.455428] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1309.455428] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1309.455428] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] 
self._fetch_image_if_missing(context, vi) [ 1309.455428] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1309.455428] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] image_cache(vi, tmp_image_ds_loc) [ 1309.455428] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1309.455838] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] vm_util.copy_virtual_disk( [ 1309.455838] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1309.455838] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] session._wait_for_task(vmdk_copy_task) [ 1309.455838] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1309.455838] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] return self.wait_for_task(task_ref) [ 1309.455838] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1309.455838] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] return evt.wait() [ 1309.455838] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1309.455838] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] result = hub.switch() [ 1309.455838] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1309.455838] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] return self.greenlet.switch() [ 1309.455838] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1309.455838] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] self.f(*self.args, **self.kw) [ 1309.456240] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1309.456240] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] raise exceptions.translate_fault(task_info.error) [ 1309.456240] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1309.456240] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Faults: ['InvalidArgument'] [ 1309.456240] env[69784]: ERROR nova.compute.manager [instance: ecec531e-41d9-47e3-b447-bc658edaea69] [ 1309.456240] env[69784]: DEBUG nova.compute.utils [None 
req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1309.457402] env[69784]: DEBUG nova.compute.manager [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Build of instance ecec531e-41d9-47e3-b447-bc658edaea69 was re-scheduled: A specified parameter was not correct: fileType [ 1309.457402] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1309.457802] env[69784]: DEBUG nova.compute.manager [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1309.457940] env[69784]: DEBUG nova.compute.manager [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1309.458132] env[69784]: DEBUG nova.compute.manager [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1309.458293] env[69784]: DEBUG nova.network.neutron [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1309.901700] env[69784]: DEBUG nova.network.neutron [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1309.919057] env[69784]: INFO nova.compute.manager [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Took 0.46 seconds to deallocate network for instance. 
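The inventory payload that the report client keeps re-confirming above for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 fixes the schedulable capacity of this compute node. The short Python sketch below is only an illustration of the usual Placement arithmetic, (total - reserved) * allocation_ratio, applied to the values copied from those log entries; it is not code from Nova or from this deployment.

    # Illustrative only: capacity implied by the inventory reported above for
    # provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 (values copied from the log).
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 94},
    }

    for rc, inv in inventory.items():
        # Placement's effective capacity rule: (total - reserved) * allocation_ratio,
        # with max_unit capping what any single allocation may request.
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: capacity={capacity:.0f} (max per single allocation: {inv['max_unit']})")

    # Expected output:
    #   VCPU: capacity=192 (max per single allocation: 16)
    #   MEMORY_MB: capacity=196078 (max per single allocation: 65530)
    #   DISK_GB: capacity=400 (max per single allocation: 94)

Measured against that capacity, the "Final resource view" above (used_ram=1792MB, used_disk=10GB, used_vcpus=10) leaves ample headroom, which is consistent with the later "Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28" entry for instance 91546cc5-6e8b-4175-b256-ba19e98c22cc.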
[ 1310.027928] env[69784]: INFO nova.scheduler.client.report [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Deleted allocations for instance ecec531e-41d9-47e3-b447-bc658edaea69 [ 1310.063798] env[69784]: DEBUG oslo_concurrency.lockutils [None req-81c4a500-f9dc-4578-a661-b345e415a598 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Lock "ecec531e-41d9-47e3-b447-bc658edaea69" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 618.979s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1310.065044] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f77bb094-be5e-4b32-b937-02427fa90214 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Lock "ecec531e-41d9-47e3-b447-bc658edaea69" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 419.810s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1310.065271] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f77bb094-be5e-4b32-b937-02427fa90214 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Acquiring lock "ecec531e-41d9-47e3-b447-bc658edaea69-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1310.065477] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f77bb094-be5e-4b32-b937-02427fa90214 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Lock "ecec531e-41d9-47e3-b447-bc658edaea69-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1310.065645] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f77bb094-be5e-4b32-b937-02427fa90214 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Lock "ecec531e-41d9-47e3-b447-bc658edaea69-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1310.069925] env[69784]: INFO nova.compute.manager [None req-f77bb094-be5e-4b32-b937-02427fa90214 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Terminating instance [ 1310.072276] env[69784]: DEBUG nova.compute.manager [None req-f77bb094-be5e-4b32-b937-02427fa90214 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Start destroying the instance on the hypervisor. 
{{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1310.072474] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-f77bb094-be5e-4b32-b937-02427fa90214 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1310.072746] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6f061ab7-f270-45db-a25b-45608012402c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.084676] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d98655e-0e4b-4188-90a3-5f7641ad5d50 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.098344] env[69784]: DEBUG nova.compute.manager [None req-3ac0a0ce-e596-4b32-b96f-3df20191f312 tempest-ServerRescueTestJSONUnderV235-56349946 tempest-ServerRescueTestJSONUnderV235-56349946-project-member] [instance: f3520c81-2ace-4113-8812-11334cc2f509] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1310.119213] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-f77bb094-be5e-4b32-b937-02427fa90214 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ecec531e-41d9-47e3-b447-bc658edaea69 could not be found. [ 1310.119424] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-f77bb094-be5e-4b32-b937-02427fa90214 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1310.119603] env[69784]: INFO nova.compute.manager [None req-f77bb094-be5e-4b32-b937-02427fa90214 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1310.120098] env[69784]: DEBUG oslo.service.loopingcall [None req-f77bb094-be5e-4b32-b937-02427fa90214 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1310.120098] env[69784]: DEBUG nova.compute.manager [-] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1310.120276] env[69784]: DEBUG nova.network.neutron [-] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1310.126386] env[69784]: DEBUG nova.compute.manager [None req-3ac0a0ce-e596-4b32-b96f-3df20191f312 tempest-ServerRescueTestJSONUnderV235-56349946 tempest-ServerRescueTestJSONUnderV235-56349946-project-member] [instance: f3520c81-2ace-4113-8812-11334cc2f509] Instance disappeared before build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1310.149991] env[69784]: DEBUG nova.network.neutron [-] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1310.152179] env[69784]: DEBUG oslo_concurrency.lockutils [None req-3ac0a0ce-e596-4b32-b96f-3df20191f312 tempest-ServerRescueTestJSONUnderV235-56349946 tempest-ServerRescueTestJSONUnderV235-56349946-project-member] Lock "f3520c81-2ace-4113-8812-11334cc2f509" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.463s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1310.161531] env[69784]: INFO nova.compute.manager [-] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] Took 0.04 seconds to deallocate network for instance. [ 1310.167034] env[69784]: DEBUG nova.compute.manager [None req-39264a63-68c4-4e81-af74-299d20a7a5eb tempest-ServerDiagnosticsNegativeTest-1424102192 tempest-ServerDiagnosticsNegativeTest-1424102192-project-member] [instance: 13fd6e03-4f8d-4e3c-a063-fd27362852e0] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1310.191613] env[69784]: DEBUG nova.compute.manager [None req-39264a63-68c4-4e81-af74-299d20a7a5eb tempest-ServerDiagnosticsNegativeTest-1424102192 tempest-ServerDiagnosticsNegativeTest-1424102192-project-member] [instance: 13fd6e03-4f8d-4e3c-a063-fd27362852e0] Instance disappeared before build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1310.222269] env[69784]: DEBUG oslo_concurrency.lockutils [None req-39264a63-68c4-4e81-af74-299d20a7a5eb tempest-ServerDiagnosticsNegativeTest-1424102192 tempest-ServerDiagnosticsNegativeTest-1424102192-project-member] Lock "13fd6e03-4f8d-4e3c-a063-fd27362852e0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 215.818s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1310.232385] env[69784]: DEBUG nova.compute.manager [None req-c1197fe9-48b5-4f2d-8af7-f6b4bcd0c047 tempest-ImagesTestJSON-82243885 tempest-ImagesTestJSON-82243885-project-member] [instance: 59b297a2-1b8e-49e5-8c7d-2e585d109f94] Starting instance... 
{{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1310.273050] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f77bb094-be5e-4b32-b937-02427fa90214 tempest-ServersWithSpecificFlavorTestJSON-1442120785 tempest-ServersWithSpecificFlavorTestJSON-1442120785-project-member] Lock "ecec531e-41d9-47e3-b447-bc658edaea69" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.208s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1310.274941] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "ecec531e-41d9-47e3-b447-bc658edaea69" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 37.384s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1310.275149] env[69784]: INFO nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: ecec531e-41d9-47e3-b447-bc658edaea69] During sync_power_state the instance has a pending task (deleting). Skip. [ 1310.275361] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "ecec531e-41d9-47e3-b447-bc658edaea69" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1310.275687] env[69784]: DEBUG nova.compute.manager [None req-c1197fe9-48b5-4f2d-8af7-f6b4bcd0c047 tempest-ImagesTestJSON-82243885 tempest-ImagesTestJSON-82243885-project-member] [instance: 59b297a2-1b8e-49e5-8c7d-2e585d109f94] Instance disappeared before build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1310.295129] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c1197fe9-48b5-4f2d-8af7-f6b4bcd0c047 tempest-ImagesTestJSON-82243885 tempest-ImagesTestJSON-82243885-project-member] Lock "59b297a2-1b8e-49e5-8c7d-2e585d109f94" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 211.993s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1310.303768] env[69784]: DEBUG nova.compute.manager [None req-91987234-59fd-4256-bbb5-c7d7bba2198d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 20518e8e-51f0-4d53-827f-f0c1a57b3bc4] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1310.328858] env[69784]: DEBUG nova.compute.manager [None req-91987234-59fd-4256-bbb5-c7d7bba2198d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 20518e8e-51f0-4d53-827f-f0c1a57b3bc4] Instance disappeared before build. 
{{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1310.350919] env[69784]: DEBUG oslo_concurrency.lockutils [None req-91987234-59fd-4256-bbb5-c7d7bba2198d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Lock "20518e8e-51f0-4d53-827f-f0c1a57b3bc4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 207.375s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1310.367272] env[69784]: DEBUG nova.compute.manager [None req-321dba4b-ff64-4362-8433-89a4626888fb tempest-ServerShowV257Test-938327008 tempest-ServerShowV257Test-938327008-project-member] [instance: cb507720-ad76-406c-b4f1-4d08fda1804d] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1310.390919] env[69784]: DEBUG nova.compute.manager [None req-321dba4b-ff64-4362-8433-89a4626888fb tempest-ServerShowV257Test-938327008 tempest-ServerShowV257Test-938327008-project-member] [instance: cb507720-ad76-406c-b4f1-4d08fda1804d] Instance disappeared before build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1310.411188] env[69784]: DEBUG oslo_concurrency.lockutils [None req-321dba4b-ff64-4362-8433-89a4626888fb tempest-ServerShowV257Test-938327008 tempest-ServerShowV257Test-938327008-project-member] Lock "cb507720-ad76-406c-b4f1-4d08fda1804d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 205.715s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1310.420995] env[69784]: DEBUG nova.compute.manager [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Starting instance... 
{{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1310.483087] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1310.483359] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1310.484959] env[69784]: INFO nova.compute.claims [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1310.777306] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ca03d72-82f0-4770-9ab0-20bc5bdcb7b6 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.785148] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-265bcca0-2519-4812-9d39-174960b43944 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.815640] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7000d08c-6add-4965-a28f-d2cae6f30e95 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.823572] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d57cdefe-c0db-4a64-9373-e08be41b061e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.835873] env[69784]: DEBUG nova.compute.provider_tree [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1310.844310] env[69784]: DEBUG nova.scheduler.client.report [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1310.862095] env[69784]: DEBUG oslo_concurrency.lockutils [None 
req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.379s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1310.862600] env[69784]: DEBUG nova.compute.manager [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Start building networks asynchronously for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1310.916122] env[69784]: DEBUG nova.compute.utils [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1310.917449] env[69784]: DEBUG nova.compute.manager [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1310.917667] env[69784]: DEBUG nova.network.neutron [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1310.927335] env[69784]: DEBUG nova.compute.manager [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1310.981470] env[69784]: DEBUG nova.policy [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5ca8a3265c314ddb8bcdb6cd3b5781a7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6fb8f53aa7bf4aba833d184b63d5faf5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 1310.990522] env[69784]: DEBUG nova.compute.manager [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Start spawning the instance on the hypervisor. 
{{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1311.017075] env[69784]: DEBUG nova.virt.hardware [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1311.017367] env[69784]: DEBUG nova.virt.hardware [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1311.017633] env[69784]: DEBUG nova.virt.hardware [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1311.017709] env[69784]: DEBUG nova.virt.hardware [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1311.017878] env[69784]: DEBUG nova.virt.hardware [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1311.017998] env[69784]: DEBUG nova.virt.hardware [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1311.018235] env[69784]: DEBUG nova.virt.hardware [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1311.018403] env[69784]: DEBUG nova.virt.hardware [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1311.018577] env[69784]: DEBUG nova.virt.hardware [None 
req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1311.018746] env[69784]: DEBUG nova.virt.hardware [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1311.018989] env[69784]: DEBUG nova.virt.hardware [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1311.019946] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64daed06-8c0e-4334-b763-28269ec6ee0b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.028059] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-735f7ee8-212b-4e8b-b866-09bade1cdafb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.381844] env[69784]: DEBUG nova.network.neutron [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Successfully created port: 3f77415e-881f-438a-8850-af44893b2eb1 {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1312.070524] env[69784]: DEBUG nova.compute.manager [req-5de23572-c817-4fc7-b406-434a17c6a585 req-6b32a611-5b2a-4b81-a0cc-404858bac105 service nova] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Received event network-vif-plugged-3f77415e-881f-438a-8850-af44893b2eb1 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1312.070774] env[69784]: DEBUG oslo_concurrency.lockutils [req-5de23572-c817-4fc7-b406-434a17c6a585 req-6b32a611-5b2a-4b81-a0cc-404858bac105 service nova] Acquiring lock "91546cc5-6e8b-4175-b256-ba19e98c22cc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1312.070942] env[69784]: DEBUG oslo_concurrency.lockutils [req-5de23572-c817-4fc7-b406-434a17c6a585 req-6b32a611-5b2a-4b81-a0cc-404858bac105 service nova] Lock "91546cc5-6e8b-4175-b256-ba19e98c22cc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1312.071046] env[69784]: DEBUG oslo_concurrency.lockutils [req-5de23572-c817-4fc7-b406-434a17c6a585 req-6b32a611-5b2a-4b81-a0cc-404858bac105 service nova] Lock "91546cc5-6e8b-4175-b256-ba19e98c22cc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1312.071183] env[69784]: DEBUG nova.compute.manager 
[req-5de23572-c817-4fc7-b406-434a17c6a585 req-6b32a611-5b2a-4b81-a0cc-404858bac105 service nova] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] No waiting events found dispatching network-vif-plugged-3f77415e-881f-438a-8850-af44893b2eb1 {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1312.071385] env[69784]: WARNING nova.compute.manager [req-5de23572-c817-4fc7-b406-434a17c6a585 req-6b32a611-5b2a-4b81-a0cc-404858bac105 service nova] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Received unexpected event network-vif-plugged-3f77415e-881f-438a-8850-af44893b2eb1 for instance with vm_state building and task_state spawning. [ 1312.077432] env[69784]: DEBUG nova.network.neutron [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Successfully updated port: 3f77415e-881f-438a-8850-af44893b2eb1 {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1312.091521] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Acquiring lock "refresh_cache-91546cc5-6e8b-4175-b256-ba19e98c22cc" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1312.091682] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Acquired lock "refresh_cache-91546cc5-6e8b-4175-b256-ba19e98c22cc" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1312.091832] env[69784]: DEBUG nova.network.neutron [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1312.132906] env[69784]: DEBUG nova.network.neutron [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Instance cache missing network info. 
{{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1312.521644] env[69784]: DEBUG nova.network.neutron [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Updating instance_info_cache with network_info: [{"id": "3f77415e-881f-438a-8850-af44893b2eb1", "address": "fa:16:3e:1b:38:05", "network": {"id": "17c4aff7-c0bb-47ef-8da4-2cf89f3363d2", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-908452625-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6fb8f53aa7bf4aba833d184b63d5faf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f77415e-88", "ovs_interfaceid": "3f77415e-881f-438a-8850-af44893b2eb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1312.535288] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Releasing lock "refresh_cache-91546cc5-6e8b-4175-b256-ba19e98c22cc" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1312.535741] env[69784]: DEBUG nova.compute.manager [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Instance network_info: |[{"id": "3f77415e-881f-438a-8850-af44893b2eb1", "address": "fa:16:3e:1b:38:05", "network": {"id": "17c4aff7-c0bb-47ef-8da4-2cf89f3363d2", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-908452625-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6fb8f53aa7bf4aba833d184b63d5faf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f77415e-88", "ovs_interfaceid": "3f77415e-881f-438a-8850-af44893b2eb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1971}} [ 1312.536430] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1b:38:05', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e238ac23-819b-452f-9015-52922e45efd3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3f77415e-881f-438a-8850-af44893b2eb1', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1312.543776] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Creating folder: Project (6fb8f53aa7bf4aba833d184b63d5faf5). Parent ref: group-v692547. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1312.544248] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2bfc433f-fb08-4dc9-9363-1b1d7a2374ce {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.554765] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Created folder: Project (6fb8f53aa7bf4aba833d184b63d5faf5) in parent group-v692547. [ 1312.554946] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Creating folder: Instances. Parent ref: group-v692614. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1312.555181] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4bdf85d7-a905-48fd-af8c-2c673e676b25 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.564223] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Created folder: Instances in parent group-v692614. [ 1312.564463] env[69784]: DEBUG oslo.service.loopingcall [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1312.564647] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1312.564831] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e94b8a97-aef0-4e33-ac2d-f32608dde3d6 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.583305] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1312.583305] env[69784]: value = "task-3467122" [ 1312.583305] env[69784]: _type = "Task" [ 1312.583305] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.590664] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467122, 'name': CreateVM_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.093611] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467122, 'name': CreateVM_Task, 'duration_secs': 0.290456} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.093910] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1313.100100] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1313.100276] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1313.100621] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1313.101205] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b375c38-7a68-4ec3-a460-255b30e97b9f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.105607] env[69784]: DEBUG oslo_vmware.api [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Waiting for the task: (returnval){ [ 1313.105607] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]528c12cb-5e02-4696-ec87-8808c1e0b2a6" [ 1313.105607] env[69784]: _type = "Task" [ 1313.105607] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.112950] env[69784]: DEBUG oslo_vmware.api [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]528c12cb-5e02-4696-ec87-8808c1e0b2a6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.616435] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1313.616691] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1313.616899] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1314.104964] env[69784]: DEBUG nova.compute.manager [req-518d7b73-5442-406a-ac9f-7befda2a506f req-d370d741-9a67-404a-90fe-0580535d5f22 service nova] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Received event network-changed-3f77415e-881f-438a-8850-af44893b2eb1 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1314.105254] env[69784]: DEBUG nova.compute.manager [req-518d7b73-5442-406a-ac9f-7befda2a506f req-d370d741-9a67-404a-90fe-0580535d5f22 service nova] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Refreshing instance network info cache due to event network-changed-3f77415e-881f-438a-8850-af44893b2eb1. {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1314.105450] env[69784]: DEBUG oslo_concurrency.lockutils [req-518d7b73-5442-406a-ac9f-7befda2a506f req-d370d741-9a67-404a-90fe-0580535d5f22 service nova] Acquiring lock "refresh_cache-91546cc5-6e8b-4175-b256-ba19e98c22cc" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1314.105527] env[69784]: DEBUG oslo_concurrency.lockutils [req-518d7b73-5442-406a-ac9f-7befda2a506f req-d370d741-9a67-404a-90fe-0580535d5f22 service nova] Acquired lock "refresh_cache-91546cc5-6e8b-4175-b256-ba19e98c22cc" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1314.105708] env[69784]: DEBUG nova.network.neutron [req-518d7b73-5442-406a-ac9f-7befda2a506f req-d370d741-9a67-404a-90fe-0580535d5f22 service nova] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Refreshing network info cache for port 3f77415e-881f-438a-8850-af44893b2eb1 {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1314.366580] env[69784]: DEBUG nova.network.neutron [req-518d7b73-5442-406a-ac9f-7befda2a506f req-d370d741-9a67-404a-90fe-0580535d5f22 service nova] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Updated VIF entry in instance network info cache for port 3f77415e-881f-438a-8850-af44893b2eb1. 
{{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1314.366979] env[69784]: DEBUG nova.network.neutron [req-518d7b73-5442-406a-ac9f-7befda2a506f req-d370d741-9a67-404a-90fe-0580535d5f22 service nova] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Updating instance_info_cache with network_info: [{"id": "3f77415e-881f-438a-8850-af44893b2eb1", "address": "fa:16:3e:1b:38:05", "network": {"id": "17c4aff7-c0bb-47ef-8da4-2cf89f3363d2", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-908452625-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6fb8f53aa7bf4aba833d184b63d5faf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f77415e-88", "ovs_interfaceid": "3f77415e-881f-438a-8850-af44893b2eb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1314.376261] env[69784]: DEBUG oslo_concurrency.lockutils [req-518d7b73-5442-406a-ac9f-7befda2a506f req-d370d741-9a67-404a-90fe-0580535d5f22 service nova] Releasing lock "refresh_cache-91546cc5-6e8b-4175-b256-ba19e98c22cc" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1323.631401] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Acquiring lock "93ea7e73-f280-4e22-9ac7-f1be9926a158" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1323.631715] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Lock "93ea7e73-f280-4e22-9ac7-f1be9926a158" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1327.522662] env[69784]: DEBUG oslo_concurrency.lockutils [None req-0903f6c3-7277-4e46-b454-a37be63e85be tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Acquiring lock "91546cc5-6e8b-4175-b256-ba19e98c22cc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1335.841442] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69784) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1335.841711] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1335.842089] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69784) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1336.841769] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1336.841769] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1337.840067] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1340.834576] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1341.840953] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1341.855887] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1341.856152] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1341.856324] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1341.856479] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69784) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1341.857780] env[69784]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e56bf2e-f26e-4658-8c84-36734bcf11e5 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.866218] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c83b74e9-2a1a-4056-a751-41ee497ca0ae {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.880444] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1634e7b8-ae4f-4ce3-b0e0-eb08481cb227 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.886834] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70d07706-f854-41e5-a98e-8cfbcfe888f8 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.916348] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180948MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=69784) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1341.916500] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1341.916737] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1341.989686] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c68ee659-716d-47cc-a6a1-d4c18fa5664f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1341.989854] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 16edc743-b24c-4a20-9046-f5d519bd7e9a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1341.989984] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c23e7041-ca02-4047-84d5-84b62f36b37f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1341.990121] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 7a640743-734e-4dc0-a965-0a71dddfb918 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1341.990244] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 288af650-a19b-4ce5-baea-013dcaa6e908 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1341.990362] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 6109a6f5-11ea-4983-b271-f84aa859d6cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1341.990479] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 694e2a62-5f2e-475d-9356-a66651c3e5e2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1341.990597] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance e6d05e25-386e-43d1-aec4-d62b9476891d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1341.990711] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 26793ea1-2934-4b30-8f8c-6beefe7046f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1341.990827] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 91546cc5-6e8b-4175-b256-ba19e98c22cc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1342.001112] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 8c4c0989-5269-4b6a-9b5a-778803657608 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1342.012627] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 1473585c-f194-4396-b568-e8c1bc6d049b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1342.021849] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 697cd7aa-d710-4e46-b241-085961a8631d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1342.031016] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 161991fb-77d5-4a18-b0f3-d2346c8d3b68 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1342.039964] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1342.048813] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 96ccaa91-fd19-4793-9583-afb4d5708cd1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1342.057696] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 3d3b940f-5376-4300-8b69-6cf40b0e2e31 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1342.066096] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance f04ff971-fae1-453a-a131-308618f24020 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1342.074653] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance a32a5943-a449-4b47-820c-5a7e593c6443 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1342.083851] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 93ea7e73-f280-4e22-9ac7-f1be9926a158 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1342.084111] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1342.084261] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1342.325733] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cb0739f-ec9c-4776-87a5-510efddd8e51 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.332578] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d38d9a33-2452-4fe5-b9c2-b93c6a4ab49b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.362431] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53f39dbc-33dc-4a28-8c03-3ac84cf9f14e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.369528] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d95ddc-b089-435b-be88-0b09e2129a06 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.384272] env[69784]: DEBUG nova.compute.provider_tree [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1342.393278] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1342.407243] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69784) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1342.407431] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.491s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1342.991413] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7053077b-65d1-4ed1-8db2-065893f4d97f tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Acquiring lock "45611c65-5ce7-4e31-add1-29d5b9d87e5a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1342.991724] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7053077b-65d1-4ed1-8db2-065893f4d97f tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Lock "45611c65-5ce7-4e31-add1-29d5b9d87e5a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1343.406404] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1343.406598] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Starting heal instance info cache {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1343.406725] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Rebuilding the list of instances to heal {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1343.428801] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1343.429586] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1343.429586] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Skipping network cache update for instance because it is Building. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1343.429586] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1343.429756] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1343.429807] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1343.429932] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1343.430098] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1343.430223] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1343.430342] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1343.430481] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Didn't find any instances for network info cache update. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1345.839548] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1345.863943] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1357.539867] env[69784]: WARNING oslo_vmware.rw_handles [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1357.539867] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1357.539867] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1357.539867] env[69784]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1357.539867] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1357.539867] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 1357.539867] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1357.539867] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1357.539867] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1357.539867] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1357.539867] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1357.539867] env[69784]: ERROR oslo_vmware.rw_handles [ 1357.540527] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/44d12b87-401b-4faf-94aa-b9e23a2340e8/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1357.542165] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1357.542437] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Copying Virtual Disk [datastore1] vmware_temp/44d12b87-401b-4faf-94aa-b9e23a2340e8/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] 
vmware_temp/44d12b87-401b-4faf-94aa-b9e23a2340e8/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1357.542675] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-906a03eb-64ea-4ac1-a4c1-7bc4e30122eb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.550988] env[69784]: DEBUG oslo_vmware.api [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Waiting for the task: (returnval){ [ 1357.550988] env[69784]: value = "task-3467123" [ 1357.550988] env[69784]: _type = "Task" [ 1357.550988] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.559181] env[69784]: DEBUG oslo_vmware.api [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Task: {'id': task-3467123, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.062045] env[69784]: DEBUG oslo_vmware.exceptions [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Fault InvalidArgument not matched. {{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1358.062045] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1358.062462] env[69784]: ERROR nova.compute.manager [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1358.062462] env[69784]: Faults: ['InvalidArgument'] [ 1358.062462] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Traceback (most recent call last): [ 1358.062462] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1358.062462] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] yield resources [ 1358.062462] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1358.062462] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] self.driver.spawn(context, instance, image_meta, [ 1358.062462] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1358.062462] env[69784]: ERROR nova.compute.manager [instance: 
c68ee659-716d-47cc-a6a1-d4c18fa5664f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1358.062462] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1358.062462] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] self._fetch_image_if_missing(context, vi) [ 1358.062462] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1358.062843] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] image_cache(vi, tmp_image_ds_loc) [ 1358.062843] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1358.062843] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] vm_util.copy_virtual_disk( [ 1358.062843] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1358.062843] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] session._wait_for_task(vmdk_copy_task) [ 1358.062843] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1358.062843] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] return self.wait_for_task(task_ref) [ 1358.062843] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1358.062843] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] return evt.wait() [ 1358.062843] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1358.062843] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] result = hub.switch() [ 1358.062843] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1358.062843] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] return self.greenlet.switch() [ 1358.063261] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1358.063261] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] self.f(*self.args, **self.kw) [ 1358.063261] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1358.063261] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] raise exceptions.translate_fault(task_info.error) [ 1358.063261] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1358.063261] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Faults: ['InvalidArgument'] [ 1358.063261] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] [ 1358.063261] env[69784]: INFO nova.compute.manager [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Terminating instance [ 1358.064323] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1358.064529] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1358.065208] env[69784]: DEBUG nova.compute.manager [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1358.065394] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1358.065606] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-91dc6fe7-f02c-4c3f-8be4-3e63c6dcbe47 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.067890] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdb8c8bf-b0de-4257-a9fc-0ab4fa040b2d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.074528] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1358.074735] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a50b4ef1-9f2e-4b17-8fa5-1a9c218cb2aa {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.076835] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Created directory with path [datastore1] devstack-image-cache_base 
{{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1358.076973] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1358.077873] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa7e7f86-3ac8-4c76-881e-391abe8a605d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.082167] env[69784]: DEBUG oslo_vmware.api [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Waiting for the task: (returnval){ [ 1358.082167] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52132fd8-5463-9d3b-e8c0-b010db39fea7" [ 1358.082167] env[69784]: _type = "Task" [ 1358.082167] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.096129] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1358.096356] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Creating directory with path [datastore1] vmware_temp/604d2812-ee6e-416b-91c2-9b16fef35a4a/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1358.096559] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3059eecd-2aa0-41c2-9a53-2bade8972010 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.107743] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Created directory with path [datastore1] vmware_temp/604d2812-ee6e-416b-91c2-9b16fef35a4a/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1358.107934] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Fetch image to [datastore1] vmware_temp/604d2812-ee6e-416b-91c2-9b16fef35a4a/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1358.108112] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] 
vmware_temp/604d2812-ee6e-416b-91c2-9b16fef35a4a/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1358.108822] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b12f66-cb91-4e56-aa1b-a923c42d3766 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.115447] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36de7657-0610-4cae-a5b9-2074078049d6 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.125640] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bb39484-ecb0-447d-b562-69939e85d927 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.158186] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6bdf834-5187-43a2-95b1-96e999a3bdec {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.160709] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1358.160908] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1358.161080] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Deleting the datastore file [datastore1] c68ee659-716d-47cc-a6a1-d4c18fa5664f {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1358.161362] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6f6a0cd9-1f0c-4c1d-95df-50e18941c023 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.166011] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6bdf2207-debb-49bb-bf1b-87fa38212d58 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.168709] env[69784]: DEBUG oslo_vmware.api [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Waiting for the task: (returnval){ [ 1358.168709] env[69784]: value = "task-3467125" [ 1358.168709] env[69784]: _type = "Task" [ 1358.168709] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.175903] env[69784]: DEBUG oslo_vmware.api [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Task: {'id': task-3467125, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.202946] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1358.366457] env[69784]: DEBUG oslo_vmware.rw_handles [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/604d2812-ee6e-416b-91c2-9b16fef35a4a/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1358.432059] env[69784]: DEBUG oslo_vmware.rw_handles [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Completed reading data from the image iterator. {{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1358.432059] env[69784]: DEBUG oslo_vmware.rw_handles [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/604d2812-ee6e-416b-91c2-9b16fef35a4a/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1358.553067] env[69784]: DEBUG oslo_concurrency.lockutils [None req-11194526-e975-47c7-ac62-b785ffaa1c2f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquiring lock "f1af419b-6bc6-4ba5-9ec1-3011b3b055ef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1358.553371] env[69784]: DEBUG oslo_concurrency.lockutils [None req-11194526-e975-47c7-ac62-b785ffaa1c2f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "f1af419b-6bc6-4ba5-9ec1-3011b3b055ef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1358.678961] env[69784]: DEBUG oslo_vmware.api [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Task: {'id': task-3467125, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076073} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.679616] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1358.679616] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1358.679799] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1358.679971] env[69784]: INFO nova.compute.manager [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1358.682179] env[69784]: DEBUG nova.compute.claims [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1358.682353] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1358.682565] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1358.964882] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a65449d-2336-4ff1-b71e-cc41a1d29897 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.972530] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ffb3931-23d0-47fc-bdb7-9079d3c2f703 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.002830] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aba25df-96a9-4a74-8c68-411037315007 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.009723] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d48c6215-118c-436a-bad0-8d27a0d48c21 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.022373] env[69784]: DEBUG nova.compute.provider_tree [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1359.032228] env[69784]: DEBUG nova.scheduler.client.report [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1359.045564] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 
tempest-ServerPasswordTestJSON-32163534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.363s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1359.046115] env[69784]: ERROR nova.compute.manager [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1359.046115] env[69784]: Faults: ['InvalidArgument'] [ 1359.046115] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Traceback (most recent call last): [ 1359.046115] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1359.046115] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] self.driver.spawn(context, instance, image_meta, [ 1359.046115] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1359.046115] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1359.046115] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1359.046115] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] self._fetch_image_if_missing(context, vi) [ 1359.046115] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1359.046115] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] image_cache(vi, tmp_image_ds_loc) [ 1359.046115] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1359.046676] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] vm_util.copy_virtual_disk( [ 1359.046676] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1359.046676] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] session._wait_for_task(vmdk_copy_task) [ 1359.046676] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1359.046676] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] return self.wait_for_task(task_ref) [ 1359.046676] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1359.046676] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] return evt.wait() [ 1359.046676] env[69784]: ERROR nova.compute.manager [instance: 
c68ee659-716d-47cc-a6a1-d4c18fa5664f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1359.046676] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] result = hub.switch() [ 1359.046676] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1359.046676] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] return self.greenlet.switch() [ 1359.046676] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1359.046676] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] self.f(*self.args, **self.kw) [ 1359.047309] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1359.047309] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] raise exceptions.translate_fault(task_info.error) [ 1359.047309] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1359.047309] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Faults: ['InvalidArgument'] [ 1359.047309] env[69784]: ERROR nova.compute.manager [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] [ 1359.047309] env[69784]: DEBUG nova.compute.utils [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1359.048185] env[69784]: DEBUG nova.compute.manager [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Build of instance c68ee659-716d-47cc-a6a1-d4c18fa5664f was re-scheduled: A specified parameter was not correct: fileType [ 1359.048185] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1359.048561] env[69784]: DEBUG nova.compute.manager [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1359.048729] env[69784]: DEBUG nova.compute.manager [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1359.048892] env[69784]: DEBUG nova.compute.manager [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1359.049070] env[69784]: DEBUG nova.network.neutron [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1359.378411] env[69784]: DEBUG nova.network.neutron [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1359.393820] env[69784]: INFO nova.compute.manager [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Took 0.34 seconds to deallocate network for instance. [ 1359.494758] env[69784]: INFO nova.scheduler.client.report [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Deleted allocations for instance c68ee659-716d-47cc-a6a1-d4c18fa5664f [ 1359.517371] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7bcc17ed-7bdf-4f17-85fa-9536178dd37d tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Lock "c68ee659-716d-47cc-a6a1-d4c18fa5664f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 668.325s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1359.518648] env[69784]: DEBUG oslo_concurrency.lockutils [None req-56c473f7-bc51-4640-9a5d-d821b99658ee tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Lock "c68ee659-716d-47cc-a6a1-d4c18fa5664f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 469.833s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1359.518895] env[69784]: DEBUG oslo_concurrency.lockutils [None req-56c473f7-bc51-4640-9a5d-d821b99658ee tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Acquiring lock "c68ee659-716d-47cc-a6a1-d4c18fa5664f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1359.519122] env[69784]: DEBUG oslo_concurrency.lockutils [None req-56c473f7-bc51-4640-9a5d-d821b99658ee tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Lock "c68ee659-716d-47cc-a6a1-d4c18fa5664f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69784) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1359.519293] env[69784]: DEBUG oslo_concurrency.lockutils [None req-56c473f7-bc51-4640-9a5d-d821b99658ee tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Lock "c68ee659-716d-47cc-a6a1-d4c18fa5664f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1359.522624] env[69784]: INFO nova.compute.manager [None req-56c473f7-bc51-4640-9a5d-d821b99658ee tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Terminating instance [ 1359.524320] env[69784]: DEBUG nova.compute.manager [None req-56c473f7-bc51-4640-9a5d-d821b99658ee tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1359.524509] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-56c473f7-bc51-4640-9a5d-d821b99658ee tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1359.524758] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d803e1d2-e10e-4fee-b585-f9e49cde6f3d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.534247] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae1f2ffe-8a61-4caa-a5ad-80ef3fc0b0fc {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.547672] env[69784]: DEBUG nova.compute.manager [None req-56d397f7-2662-4b81-bfbf-4a7322d2e921 tempest-SecurityGroupsTestJSON-1889373630 tempest-SecurityGroupsTestJSON-1889373630-project-member] [instance: 9b208c32-b233-4312-902a-3e4be0ddb23b] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1359.569438] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-56c473f7-bc51-4640-9a5d-d821b99658ee tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c68ee659-716d-47cc-a6a1-d4c18fa5664f could not be found. [ 1359.569655] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-56c473f7-bc51-4640-9a5d-d821b99658ee tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1359.569819] env[69784]: INFO nova.compute.manager [None req-56c473f7-bc51-4640-9a5d-d821b99658ee tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 1359.569972] env[69784]: DEBUG oslo.service.loopingcall [None req-56c473f7-bc51-4640-9a5d-d821b99658ee tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1359.570235] env[69784]: DEBUG nova.compute.manager [-] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1359.570299] env[69784]: DEBUG nova.network.neutron [-] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1359.579347] env[69784]: DEBUG nova.compute.manager [None req-56d397f7-2662-4b81-bfbf-4a7322d2e921 tempest-SecurityGroupsTestJSON-1889373630 tempest-SecurityGroupsTestJSON-1889373630-project-member] [instance: 9b208c32-b233-4312-902a-3e4be0ddb23b] Instance disappeared before build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1359.593133] env[69784]: DEBUG nova.network.neutron [-] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1359.599882] env[69784]: DEBUG oslo_concurrency.lockutils [None req-56d397f7-2662-4b81-bfbf-4a7322d2e921 tempest-SecurityGroupsTestJSON-1889373630 tempest-SecurityGroupsTestJSON-1889373630-project-member] Lock "9b208c32-b233-4312-902a-3e4be0ddb23b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 226.326s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1359.601302] env[69784]: INFO nova.compute.manager [-] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] Took 0.03 seconds to deallocate network for instance. [ 1359.611201] env[69784]: DEBUG nova.compute.manager [None req-83927e31-b126-4eb6-a686-b6c4d2b9b837 tempest-AttachInterfacesTestJSON-1468351673 tempest-AttachInterfacesTestJSON-1468351673-project-member] [instance: 2d4c834f-d9da-497f-8863-28c30e11c113] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1359.633610] env[69784]: DEBUG nova.compute.manager [None req-83927e31-b126-4eb6-a686-b6c4d2b9b837 tempest-AttachInterfacesTestJSON-1468351673 tempest-AttachInterfacesTestJSON-1468351673-project-member] [instance: 2d4c834f-d9da-497f-8863-28c30e11c113] Instance disappeared before build. 
{{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1359.651835] env[69784]: DEBUG oslo_concurrency.lockutils [None req-83927e31-b126-4eb6-a686-b6c4d2b9b837 tempest-AttachInterfacesTestJSON-1468351673 tempest-AttachInterfacesTestJSON-1468351673-project-member] Lock "2d4c834f-d9da-497f-8863-28c30e11c113" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 223.676s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1359.659940] env[69784]: DEBUG nova.compute.manager [None req-582556ac-5ebd-4902-969a-d3c2cc4a87a2 tempest-ServerActionsTestJSON-1763851275 tempest-ServerActionsTestJSON-1763851275-project-member] [instance: d165cf46-31ba-40e3-b7e9-fad5f05242ca] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1359.690167] env[69784]: DEBUG nova.compute.manager [None req-582556ac-5ebd-4902-969a-d3c2cc4a87a2 tempest-ServerActionsTestJSON-1763851275 tempest-ServerActionsTestJSON-1763851275-project-member] [instance: d165cf46-31ba-40e3-b7e9-fad5f05242ca] Instance disappeared before build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1359.703735] env[69784]: DEBUG oslo_concurrency.lockutils [None req-56c473f7-bc51-4640-9a5d-d821b99658ee tempest-ServerPasswordTestJSON-32163534 tempest-ServerPasswordTestJSON-32163534-project-member] Lock "c68ee659-716d-47cc-a6a1-d4c18fa5664f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.185s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1359.704805] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "c68ee659-716d-47cc-a6a1-d4c18fa5664f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 86.813s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1359.705064] env[69784]: INFO nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c68ee659-716d-47cc-a6a1-d4c18fa5664f] During sync_power_state the instance has a pending task (deleting). Skip. [ 1359.705255] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "c68ee659-716d-47cc-a6a1-d4c18fa5664f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1359.710942] env[69784]: DEBUG oslo_concurrency.lockutils [None req-582556ac-5ebd-4902-969a-d3c2cc4a87a2 tempest-ServerActionsTestJSON-1763851275 tempest-ServerActionsTestJSON-1763851275-project-member] Lock "d165cf46-31ba-40e3-b7e9-fad5f05242ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 220.456s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1359.719603] env[69784]: DEBUG nova.compute.manager [None req-888ef8bb-c597-455d-a70a-8aafca51dff7 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 8c4c0989-5269-4b6a-9b5a-778803657608] Starting instance... 
{{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1359.741910] env[69784]: DEBUG nova.compute.manager [None req-888ef8bb-c597-455d-a70a-8aafca51dff7 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 8c4c0989-5269-4b6a-9b5a-778803657608] Instance disappeared before build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1359.761727] env[69784]: DEBUG oslo_concurrency.lockutils [None req-888ef8bb-c597-455d-a70a-8aafca51dff7 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Lock "8c4c0989-5269-4b6a-9b5a-778803657608" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 213.489s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1359.770121] env[69784]: DEBUG nova.compute.manager [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1359.817478] env[69784]: DEBUG oslo_concurrency.lockutils [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1359.817718] env[69784]: DEBUG oslo_concurrency.lockutils [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1359.819115] env[69784]: INFO nova.compute.claims [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1360.079030] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c385c460-5869-4177-ac77-50485764c8f7 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.086251] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f711c1a2-5850-4e2f-a48a-08a36a1f87b2 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.115706] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b33af15-c20a-46f2-9a42-5b90e923f665 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.122195] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aee86bd-4fd6-4dde-a54d-2f7540a3eae3 {{(pid=69784) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.134779] env[69784]: DEBUG nova.compute.provider_tree [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1360.142813] env[69784]: DEBUG nova.scheduler.client.report [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1360.156451] env[69784]: DEBUG oslo_concurrency.lockutils [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.339s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1360.156892] env[69784]: DEBUG nova.compute.manager [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Start building networks asynchronously for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1360.188739] env[69784]: DEBUG nova.compute.utils [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1360.190365] env[69784]: DEBUG nova.compute.manager [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1360.190560] env[69784]: DEBUG nova.network.neutron [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1360.199506] env[69784]: DEBUG nova.compute.manager [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Start building block device mappings for instance. 
{{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1360.256278] env[69784]: DEBUG nova.policy [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8048e8ae4a8a42cd92f2f680ab1bd05c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6314a935735741e8a24b236f7f668743', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 1360.263761] env[69784]: DEBUG nova.compute.manager [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Start spawning the instance on the hypervisor. {{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1360.289607] env[69784]: DEBUG nova.virt.hardware [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1360.289865] env[69784]: DEBUG nova.virt.hardware [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1360.290034] env[69784]: DEBUG nova.virt.hardware [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1360.290223] env[69784]: DEBUG nova.virt.hardware [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1360.290368] env[69784]: DEBUG nova.virt.hardware [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Image pref 0:0:0 {{(pid=69784) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1360.290514] env[69784]: DEBUG nova.virt.hardware [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1360.290721] env[69784]: DEBUG nova.virt.hardware [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1360.290880] env[69784]: DEBUG nova.virt.hardware [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1360.291068] env[69784]: DEBUG nova.virt.hardware [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1360.291237] env[69784]: DEBUG nova.virt.hardware [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1360.291407] env[69784]: DEBUG nova.virt.hardware [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1360.292263] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b74a435-c4ee-430d-9d6b-ced40fcb0cac {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.299932] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d8f5e42-ac05-4100-85d2-c52ab98e744f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.573055] env[69784]: DEBUG nova.network.neutron [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Successfully created port: 6b2c794f-105d-4429-bfdc-381f25b073bb {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1361.414112] env[69784]: DEBUG nova.compute.manager [req-da23c396-ed77-46bd-869a-754c920f0b0b req-3f546ee9-f08b-49f0-8f11-ac0d5f1d29c0 service nova] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Received event network-vif-plugged-6b2c794f-105d-4429-bfdc-381f25b073bb {{(pid=69784) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1361.414336] env[69784]: DEBUG oslo_concurrency.lockutils [req-da23c396-ed77-46bd-869a-754c920f0b0b req-3f546ee9-f08b-49f0-8f11-ac0d5f1d29c0 service nova] Acquiring lock "1473585c-f194-4396-b568-e8c1bc6d049b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1361.414548] env[69784]: DEBUG oslo_concurrency.lockutils [req-da23c396-ed77-46bd-869a-754c920f0b0b req-3f546ee9-f08b-49f0-8f11-ac0d5f1d29c0 service nova] Lock "1473585c-f194-4396-b568-e8c1bc6d049b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1361.414715] env[69784]: DEBUG oslo_concurrency.lockutils [req-da23c396-ed77-46bd-869a-754c920f0b0b req-3f546ee9-f08b-49f0-8f11-ac0d5f1d29c0 service nova] Lock "1473585c-f194-4396-b568-e8c1bc6d049b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1361.414927] env[69784]: DEBUG nova.compute.manager [req-da23c396-ed77-46bd-869a-754c920f0b0b req-3f546ee9-f08b-49f0-8f11-ac0d5f1d29c0 service nova] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] No waiting events found dispatching network-vif-plugged-6b2c794f-105d-4429-bfdc-381f25b073bb {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1361.416587] env[69784]: WARNING nova.compute.manager [req-da23c396-ed77-46bd-869a-754c920f0b0b req-3f546ee9-f08b-49f0-8f11-ac0d5f1d29c0 service nova] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Received unexpected event network-vif-plugged-6b2c794f-105d-4429-bfdc-381f25b073bb for instance with vm_state building and task_state spawning. 
[ 1361.468115] env[69784]: DEBUG nova.network.neutron [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Successfully updated port: 6b2c794f-105d-4429-bfdc-381f25b073bb {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1361.483805] env[69784]: DEBUG oslo_concurrency.lockutils [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Acquiring lock "refresh_cache-1473585c-f194-4396-b568-e8c1bc6d049b" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1361.483959] env[69784]: DEBUG oslo_concurrency.lockutils [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Acquired lock "refresh_cache-1473585c-f194-4396-b568-e8c1bc6d049b" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1361.484143] env[69784]: DEBUG nova.network.neutron [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1361.522662] env[69784]: DEBUG nova.network.neutron [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Instance cache missing network info. 
{{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1361.728200] env[69784]: DEBUG nova.network.neutron [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Updating instance_info_cache with network_info: [{"id": "6b2c794f-105d-4429-bfdc-381f25b073bb", "address": "fa:16:3e:53:ca:b2", "network": {"id": "82b42852-e04f-4a7a-82f3-9f38416ad098", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-694745538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6314a935735741e8a24b236f7f668743", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d891a662-7da0-4be5-8d0d-01b4ff70552a", "external-id": "nsx-vlan-transportzone-815", "segmentation_id": 815, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b2c794f-10", "ovs_interfaceid": "6b2c794f-105d-4429-bfdc-381f25b073bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1361.739286] env[69784]: DEBUG oslo_concurrency.lockutils [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Releasing lock "refresh_cache-1473585c-f194-4396-b568-e8c1bc6d049b" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1361.739582] env[69784]: DEBUG nova.compute.manager [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Instance network_info: |[{"id": "6b2c794f-105d-4429-bfdc-381f25b073bb", "address": "fa:16:3e:53:ca:b2", "network": {"id": "82b42852-e04f-4a7a-82f3-9f38416ad098", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-694745538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6314a935735741e8a24b236f7f668743", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d891a662-7da0-4be5-8d0d-01b4ff70552a", "external-id": "nsx-vlan-transportzone-815", "segmentation_id": 815, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b2c794f-10", "ovs_interfaceid": "6b2c794f-105d-4429-bfdc-381f25b073bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}]| {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1361.739968] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:53:ca:b2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd891a662-7da0-4be5-8d0d-01b4ff70552a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6b2c794f-105d-4429-bfdc-381f25b073bb', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1361.747989] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Creating folder: Project (6314a935735741e8a24b236f7f668743). Parent ref: group-v692547. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1361.748575] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-052232b5-8455-4f97-b241-63a07ddc8dcd {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.762077] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Created folder: Project (6314a935735741e8a24b236f7f668743) in parent group-v692547. [ 1361.762077] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Creating folder: Instances. Parent ref: group-v692617. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1361.762077] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-da83a7db-6e8d-43bb-8a88-3b3785cbf51e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.771602] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Created folder: Instances in parent group-v692617. [ 1361.771914] env[69784]: DEBUG oslo.service.loopingcall [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1361.772174] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1361.772477] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-643f921a-3f0a-4f5a-942c-1ceebca76258 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.794973] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1361.794973] env[69784]: value = "task-3467128" [ 1361.794973] env[69784]: _type = "Task" [ 1361.794973] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.801791] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467128, 'name': CreateVM_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.304180] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467128, 'name': CreateVM_Task, 'duration_secs': 0.275643} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.304347] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1362.305044] env[69784]: DEBUG oslo_concurrency.lockutils [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1362.305315] env[69784]: DEBUG oslo_concurrency.lockutils [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1362.305534] env[69784]: DEBUG oslo_concurrency.lockutils [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1362.305776] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e2d0f0f-3920-4ddd-9db0-bc6b52dd741c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.310018] env[69784]: DEBUG oslo_vmware.api [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Waiting for the task: (returnval){ [ 1362.310018] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]527b0999-6ff5-8426-3b47-df6dbac03b96" [ 1362.310018] env[69784]: _type = "Task" 
[ 1362.310018] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.317382] env[69784]: DEBUG oslo_vmware.api [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]527b0999-6ff5-8426-3b47-df6dbac03b96, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.820673] env[69784]: DEBUG oslo_concurrency.lockutils [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1362.820936] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1362.821159] env[69784]: DEBUG oslo_concurrency.lockutils [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1363.441946] env[69784]: DEBUG nova.compute.manager [req-3fe58649-e4c6-4575-a7ff-597f6c8ff682 req-6ddec2cc-e954-42ab-85dc-731ba45dfa20 service nova] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Received event network-changed-6b2c794f-105d-4429-bfdc-381f25b073bb {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1363.442209] env[69784]: DEBUG nova.compute.manager [req-3fe58649-e4c6-4575-a7ff-597f6c8ff682 req-6ddec2cc-e954-42ab-85dc-731ba45dfa20 service nova] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Refreshing instance network info cache due to event network-changed-6b2c794f-105d-4429-bfdc-381f25b073bb. 
{{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1363.442430] env[69784]: DEBUG oslo_concurrency.lockutils [req-3fe58649-e4c6-4575-a7ff-597f6c8ff682 req-6ddec2cc-e954-42ab-85dc-731ba45dfa20 service nova] Acquiring lock "refresh_cache-1473585c-f194-4396-b568-e8c1bc6d049b" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1363.442576] env[69784]: DEBUG oslo_concurrency.lockutils [req-3fe58649-e4c6-4575-a7ff-597f6c8ff682 req-6ddec2cc-e954-42ab-85dc-731ba45dfa20 service nova] Acquired lock "refresh_cache-1473585c-f194-4396-b568-e8c1bc6d049b" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1363.442736] env[69784]: DEBUG nova.network.neutron [req-3fe58649-e4c6-4575-a7ff-597f6c8ff682 req-6ddec2cc-e954-42ab-85dc-731ba45dfa20 service nova] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Refreshing network info cache for port 6b2c794f-105d-4429-bfdc-381f25b073bb {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1363.676480] env[69784]: DEBUG nova.network.neutron [req-3fe58649-e4c6-4575-a7ff-597f6c8ff682 req-6ddec2cc-e954-42ab-85dc-731ba45dfa20 service nova] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Updated VIF entry in instance network info cache for port 6b2c794f-105d-4429-bfdc-381f25b073bb. {{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1363.676845] env[69784]: DEBUG nova.network.neutron [req-3fe58649-e4c6-4575-a7ff-597f6c8ff682 req-6ddec2cc-e954-42ab-85dc-731ba45dfa20 service nova] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Updating instance_info_cache with network_info: [{"id": "6b2c794f-105d-4429-bfdc-381f25b073bb", "address": "fa:16:3e:53:ca:b2", "network": {"id": "82b42852-e04f-4a7a-82f3-9f38416ad098", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-694745538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6314a935735741e8a24b236f7f668743", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d891a662-7da0-4be5-8d0d-01b4ff70552a", "external-id": "nsx-vlan-transportzone-815", "segmentation_id": 815, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b2c794f-10", "ovs_interfaceid": "6b2c794f-105d-4429-bfdc-381f25b073bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1363.686638] env[69784]: DEBUG oslo_concurrency.lockutils [req-3fe58649-e4c6-4575-a7ff-597f6c8ff682 req-6ddec2cc-e954-42ab-85dc-731ba45dfa20 service nova] Releasing lock "refresh_cache-1473585c-f194-4396-b568-e8c1bc6d049b" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1396.839626] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69784) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1396.839927] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1397.840054] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1397.840384] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69784) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1398.840588] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1399.840595] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1401.835204] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1402.839578] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1402.850972] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1402.851213] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1402.851384] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1402.851538] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69784) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1402.852642] env[69784]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93a04aff-fa03-4b2b-8829-5d3539ad2a66 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.861247] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9b36dcd-1f72-4c3c-8942-17b341823bf0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.876573] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63c313d6-064e-460d-a597-785897c676bb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.882693] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12ae0340-de05-4e6f-a61b-d7dc24569b30 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.911415] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180942MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=69784) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1402.911586] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1402.911757] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1402.986239] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 16edc743-b24c-4a20-9046-f5d519bd7e9a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1402.986402] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c23e7041-ca02-4047-84d5-84b62f36b37f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1402.986533] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 7a640743-734e-4dc0-a965-0a71dddfb918 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1402.986656] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 288af650-a19b-4ce5-baea-013dcaa6e908 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1402.986774] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 6109a6f5-11ea-4983-b271-f84aa859d6cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1402.986891] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 694e2a62-5f2e-475d-9356-a66651c3e5e2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1402.987082] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance e6d05e25-386e-43d1-aec4-d62b9476891d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1402.987231] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 26793ea1-2934-4b30-8f8c-6beefe7046f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1402.987350] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 91546cc5-6e8b-4175-b256-ba19e98c22cc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1402.987468] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 1473585c-f194-4396-b568-e8c1bc6d049b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1402.998993] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 697cd7aa-d710-4e46-b241-085961a8631d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1403.010281] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 161991fb-77d5-4a18-b0f3-d2346c8d3b68 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1403.020462] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1403.030456] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 96ccaa91-fd19-4793-9583-afb4d5708cd1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1403.041426] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 3d3b940f-5376-4300-8b69-6cf40b0e2e31 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1403.050940] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance f04ff971-fae1-453a-a131-308618f24020 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1403.060600] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance a32a5943-a449-4b47-820c-5a7e593c6443 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1403.070671] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 93ea7e73-f280-4e22-9ac7-f1be9926a158 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1403.080517] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 45611c65-5ce7-4e31-add1-29d5b9d87e5a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1403.091858] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance f1af419b-6bc6-4ba5-9ec1-3011b3b055ef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1403.092130] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1403.092286] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1403.353129] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67bf6314-b0f4-485d-95b2-65c3672af23d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.361355] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e16c7192-fa20-41f2-bdf0-7f8e54f14ab2 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.392549] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdd6c26b-19d9-4a3b-872c-65b4a883a6af {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.399806] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c105d9e-6769-4d94-be2a-cf8bf5dec0d3 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.412560] env[69784]: DEBUG nova.compute.provider_tree [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1403.420828] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1403.435619] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69784) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1403.435797] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.524s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1403.504904] env[69784]: WARNING oslo_vmware.rw_handles [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1403.504904] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1403.504904] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1403.504904] env[69784]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1403.504904] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1403.504904] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 1403.504904] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1403.504904] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1403.504904] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1403.504904] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1403.504904] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1403.504904] env[69784]: ERROR oslo_vmware.rw_handles [ 1403.505360] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/604d2812-ee6e-416b-91c2-9b16fef35a4a/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1403.507837] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1403.508137] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 
tempest-ListServerFiltersTestJSON-356601045-project-member] Copying Virtual Disk [datastore1] vmware_temp/604d2812-ee6e-416b-91c2-9b16fef35a4a/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] vmware_temp/604d2812-ee6e-416b-91c2-9b16fef35a4a/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1403.508440] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e914c91f-c8e6-4813-bcab-a6583e975a2c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.517936] env[69784]: DEBUG oslo_vmware.api [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Waiting for the task: (returnval){ [ 1403.517936] env[69784]: value = "task-3467129" [ 1403.517936] env[69784]: _type = "Task" [ 1403.517936] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.526081] env[69784]: DEBUG oslo_vmware.api [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Task: {'id': task-3467129, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.028173] env[69784]: DEBUG oslo_vmware.exceptions [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Fault InvalidArgument not matched. 
{{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1404.028532] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1404.029012] env[69784]: ERROR nova.compute.manager [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1404.029012] env[69784]: Faults: ['InvalidArgument'] [ 1404.029012] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Traceback (most recent call last): [ 1404.029012] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1404.029012] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] yield resources [ 1404.029012] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1404.029012] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] self.driver.spawn(context, instance, image_meta, [ 1404.029012] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1404.029012] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1404.029012] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1404.029012] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] self._fetch_image_if_missing(context, vi) [ 1404.029012] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1404.029456] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] image_cache(vi, tmp_image_ds_loc) [ 1404.029456] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1404.029456] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] vm_util.copy_virtual_disk( [ 1404.029456] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1404.029456] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] session._wait_for_task(vmdk_copy_task) [ 1404.029456] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1404.029456] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] return self.wait_for_task(task_ref) [ 1404.029456] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1404.029456] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] return evt.wait() [ 1404.029456] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1404.029456] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] result = hub.switch() [ 1404.029456] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1404.029456] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] return self.greenlet.switch() [ 1404.029902] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1404.029902] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] self.f(*self.args, **self.kw) [ 1404.029902] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1404.029902] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] raise exceptions.translate_fault(task_info.error) [ 1404.029902] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1404.029902] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Faults: ['InvalidArgument'] [ 1404.029902] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] [ 1404.029902] env[69784]: INFO nova.compute.manager [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Terminating instance [ 1404.031165] env[69784]: DEBUG oslo_concurrency.lockutils [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1404.031165] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1404.031307] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-764ad7b7-b4b1-43a3-a819-371392d9e012 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.033440] env[69784]: DEBUG nova.compute.manager [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1404.033636] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1404.034351] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc4c9702-6941-4b42-b30e-e9dc6a56f239 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.041135] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1404.041351] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c8097da4-19ec-475a-9092-1af25590eeb8 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.043501] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1404.043675] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1404.044598] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44e652a6-5669-4052-abb2-110205aba2db {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.049156] env[69784]: DEBUG oslo_vmware.api [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Waiting for the task: (returnval){ [ 1404.049156] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52497fae-3d43-4003-e223-1ad7dedd3647" [ 1404.049156] env[69784]: _type = "Task" [ 1404.049156] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.111814] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1404.112066] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1404.112247] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Deleting the datastore file [datastore1] 16edc743-b24c-4a20-9046-f5d519bd7e9a {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1404.112516] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5e3d0475-6011-4237-bf8d-e32ecc0d2430 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.119105] env[69784]: DEBUG oslo_vmware.api [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Waiting for the task: (returnval){ [ 1404.119105] env[69784]: value = "task-3467131" [ 1404.119105] env[69784]: _type = "Task" [ 1404.119105] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.126868] env[69784]: DEBUG oslo_vmware.api [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Task: {'id': task-3467131, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.436888] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1404.437141] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Starting heal instance info cache {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1404.437311] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Rebuilding the list of instances to heal {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1404.461790] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Skipping network cache update for instance because it is Building. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1404.462050] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1404.462262] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1404.462465] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1404.462670] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1404.462874] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1404.463065] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1404.463228] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1404.463397] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1404.463572] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1404.463729] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Didn't find any instances for network info cache update. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1404.561017] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1404.561287] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Creating directory with path [datastore1] vmware_temp/8393a7b7-3b00-4880-b2cf-4a7b0465ba59/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1404.561524] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c606c820-227c-4872-b277-029d187161a6 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.573419] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Created directory with path [datastore1] vmware_temp/8393a7b7-3b00-4880-b2cf-4a7b0465ba59/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1404.573637] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Fetch image to [datastore1] vmware_temp/8393a7b7-3b00-4880-b2cf-4a7b0465ba59/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1404.573833] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/8393a7b7-3b00-4880-b2cf-4a7b0465ba59/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1404.574568] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a9be7d5-d4aa-4ce4-ba45-26fa10bdf747 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.581298] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0d69c01-1df6-4b0f-bdd6-c5b9ad027344 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.590141] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-529bdf06-fa68-4e93-b2c1-e98be0adf2d8 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.623731] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7f6878d2-76d0-4a44-9771-5cd51f296d2d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.630652] env[69784]: DEBUG oslo_vmware.api [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Task: {'id': task-3467131, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.08313} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.632115] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1404.632308] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1404.632480] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1404.632702] env[69784]: INFO nova.compute.manager [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1404.636018] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-afbe8686-05bf-49dd-afb5-169c997f19fd {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.636378] env[69784]: DEBUG nova.compute.claims [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1404.636553] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1404.636761] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1404.660113] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1404.791726] env[69784]: DEBUG oslo_concurrency.lockutils [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1404.793277] env[69784]: ERROR nova.compute.manager [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image a83f2316-67d7-4612-bb03-1146b6453ed4. 
[ 1404.793277] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Traceback (most recent call last): [ 1404.793277] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1404.793277] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1404.793277] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1404.793277] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] result = getattr(controller, method)(*args, **kwargs) [ 1404.793277] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1404.793277] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] return self._get(image_id) [ 1404.793277] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1404.793277] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1404.793277] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1404.793532] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] resp, body = self.http_client.get(url, headers=header) [ 1404.793532] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 1404.793532] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] return self.request(url, 'GET', **kwargs) [ 1404.793532] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1404.793532] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] return self._handle_response(resp) [ 1404.793532] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1404.793532] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] raise exc.from_response(resp, resp.content) [ 1404.793532] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1404.793532] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] [ 1404.793532] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] During handling of the above exception, another exception occurred: [ 1404.793532] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] [ 1404.793532] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Traceback (most recent call last): [ 1404.793753] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1404.793753] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] yield resources [ 1404.793753] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1404.793753] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] self.driver.spawn(context, instance, image_meta, [ 1404.793753] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1404.793753] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1404.793753] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1404.793753] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] self._fetch_image_if_missing(context, vi) [ 1404.793753] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1404.793753] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] image_fetch(context, vi, tmp_image_ds_loc) [ 1404.793753] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1404.793753] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] images.fetch_image( [ 1404.793753] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1404.794211] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] metadata = IMAGE_API.get(context, image_ref) [ 1404.794211] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/image/glance.py", line 1205, in get [ 1404.794211] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] return session.show(context, image_id, [ 1404.794211] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1404.794211] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] _reraise_translated_image_exception(image_id) [ 1404.794211] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File 
"/opt/stack/nova/nova/image/glance.py", line 1031, in _reraise_translated_image_exception [ 1404.794211] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] raise new_exc.with_traceback(exc_trace) [ 1404.794211] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1404.794211] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1404.794211] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1404.794211] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] result = getattr(controller, method)(*args, **kwargs) [ 1404.794211] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1404.794211] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] return self._get(image_id) [ 1404.794459] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1404.794459] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1404.794459] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1404.794459] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] resp, body = self.http_client.get(url, headers=header) [ 1404.794459] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 1404.794459] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] return self.request(url, 'GET', **kwargs) [ 1404.794459] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1404.794459] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] return self._handle_response(resp) [ 1404.794459] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1404.794459] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] raise exc.from_response(resp, resp.content) [ 1404.794459] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] nova.exception.ImageNotAuthorized: Not authorized for image a83f2316-67d7-4612-bb03-1146b6453ed4. 
[ 1404.794459] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] [ 1404.794991] env[69784]: INFO nova.compute.manager [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Terminating instance [ 1404.795092] env[69784]: DEBUG oslo_concurrency.lockutils [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1404.795312] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1404.796095] env[69784]: DEBUG nova.compute.manager [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1404.796301] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1404.796614] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0807f18c-62af-4656-a598-6756b3a4b8aa {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.799388] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d06accc-725e-446c-a5a6-008db2bad098 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.808214] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1404.808401] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e96a179-fe26-4336-bb75-f22df39d4272 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.810576] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1404.810749] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None 
req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1404.811946] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6815b2fa-b379-4354-9460-1d6bacce7984 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.818711] env[69784]: DEBUG oslo_vmware.api [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Waiting for the task: (returnval){ [ 1404.818711] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52e82d10-b83f-16ec-4de4-579129e43ea4" [ 1404.818711] env[69784]: _type = "Task" [ 1404.818711] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.826020] env[69784]: DEBUG oslo_vmware.api [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52e82d10-b83f-16ec-4de4-579129e43ea4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.912825] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2349236d-3bb0-46e5-8217-d325e53a56f3 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.922150] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91a3a583-40e6-419d-a660-2d8e0db0526d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.951967] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0018f55f-776a-47d8-9157-5885b53d35d4 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.959467] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9355ea7a-05be-436d-ab82-8d62864967b7 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.972352] env[69784]: DEBUG nova.compute.provider_tree [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1404.980827] env[69784]: DEBUG nova.scheduler.client.report [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 
65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1404.993997] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.357s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1404.994523] env[69784]: ERROR nova.compute.manager [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1404.994523] env[69784]: Faults: ['InvalidArgument'] [ 1404.994523] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Traceback (most recent call last): [ 1404.994523] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1404.994523] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] self.driver.spawn(context, instance, image_meta, [ 1404.994523] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1404.994523] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1404.994523] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1404.994523] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] self._fetch_image_if_missing(context, vi) [ 1404.994523] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1404.994523] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] image_cache(vi, tmp_image_ds_loc) [ 1404.994523] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1404.994810] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] vm_util.copy_virtual_disk( [ 1404.994810] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1404.994810] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] session._wait_for_task(vmdk_copy_task) [ 1404.994810] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1404.994810] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] return 
self.wait_for_task(task_ref) [ 1404.994810] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1404.994810] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] return evt.wait() [ 1404.994810] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1404.994810] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] result = hub.switch() [ 1404.994810] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1404.994810] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] return self.greenlet.switch() [ 1404.994810] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1404.994810] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] self.f(*self.args, **self.kw) [ 1404.995184] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1404.995184] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] raise exceptions.translate_fault(task_info.error) [ 1404.995184] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1404.995184] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Faults: ['InvalidArgument'] [ 1404.995184] env[69784]: ERROR nova.compute.manager [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] [ 1404.995306] env[69784]: DEBUG nova.compute.utils [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1404.996564] env[69784]: DEBUG nova.compute.manager [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Build of instance 16edc743-b24c-4a20-9046-f5d519bd7e9a was re-scheduled: A specified parameter was not correct: fileType [ 1404.996564] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1404.996945] env[69784]: DEBUG nova.compute.manager [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1404.997134] env[69784]: DEBUG nova.compute.manager [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 
tempest-ListServerFiltersTestJSON-356601045-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1404.997308] env[69784]: DEBUG nova.compute.manager [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1404.997472] env[69784]: DEBUG nova.network.neutron [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1405.304129] env[69784]: DEBUG nova.network.neutron [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1405.315276] env[69784]: INFO nova.compute.manager [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Took 0.32 seconds to deallocate network for instance. [ 1405.329448] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1405.329719] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Creating directory with path [datastore1] vmware_temp/4bb0b377-0789-4d46-909d-0a0267cfae1c/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1405.329951] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5e1e067f-1c15-4b16-b0ac-37602ee00b53 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.349705] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Created directory with path [datastore1] vmware_temp/4bb0b377-0789-4d46-909d-0a0267cfae1c/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1405.349912] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Fetch image to [datastore1] 
vmware_temp/4bb0b377-0789-4d46-909d-0a0267cfae1c/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1405.350100] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/4bb0b377-0789-4d46-909d-0a0267cfae1c/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1405.351445] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92e7c4b0-720b-4e17-af3f-d207ee28c381 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.361766] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-485e0bcd-12fd-4a14-9dae-772cb34dfbfb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.373531] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16c731f8-0d5a-4961-9d0c-2b735aa53be1 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.407952] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1857df4c-d712-4695-99a8-5f5a518b73d1 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.414620] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-59d91e94-2406-4b16-be85-ffd16cca7b76 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.432100] env[69784]: INFO nova.scheduler.client.report [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Deleted allocations for instance 16edc743-b24c-4a20-9046-f5d519bd7e9a [ 1405.445787] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1405.458383] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d7188ada-02cb-4381-9647-5d20a24ee461 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Lock "16edc743-b24c-4a20-9046-f5d519bd7e9a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 687.027s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1405.458745] env[69784]: DEBUG oslo_concurrency.lockutils [None req-de3e6b67-53ae-4eae-afe5-2a6cfd5f7e07 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Lock 
"16edc743-b24c-4a20-9046-f5d519bd7e9a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 483.182s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1405.459067] env[69784]: DEBUG oslo_concurrency.lockutils [None req-de3e6b67-53ae-4eae-afe5-2a6cfd5f7e07 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Acquiring lock "16edc743-b24c-4a20-9046-f5d519bd7e9a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1405.459191] env[69784]: DEBUG oslo_concurrency.lockutils [None req-de3e6b67-53ae-4eae-afe5-2a6cfd5f7e07 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Lock "16edc743-b24c-4a20-9046-f5d519bd7e9a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1405.459508] env[69784]: DEBUG oslo_concurrency.lockutils [None req-de3e6b67-53ae-4eae-afe5-2a6cfd5f7e07 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Lock "16edc743-b24c-4a20-9046-f5d519bd7e9a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1405.462214] env[69784]: INFO nova.compute.manager [None req-de3e6b67-53ae-4eae-afe5-2a6cfd5f7e07 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Terminating instance [ 1405.464099] env[69784]: DEBUG nova.compute.manager [None req-de3e6b67-53ae-4eae-afe5-2a6cfd5f7e07 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1405.464351] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-de3e6b67-53ae-4eae-afe5-2a6cfd5f7e07 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1405.464726] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-11fbfc50-3852-4fc8-8174-d5cc454dcc88 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.473573] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce726fdf-7d95-4b93-a4fa-8a9fbcee826a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.490358] env[69784]: DEBUG nova.compute.manager [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Starting instance... 
{{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1405.512285] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-de3e6b67-53ae-4eae-afe5-2a6cfd5f7e07 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 16edc743-b24c-4a20-9046-f5d519bd7e9a could not be found. [ 1405.512515] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-de3e6b67-53ae-4eae-afe5-2a6cfd5f7e07 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1405.512698] env[69784]: INFO nova.compute.manager [None req-de3e6b67-53ae-4eae-afe5-2a6cfd5f7e07 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1405.512945] env[69784]: DEBUG oslo.service.loopingcall [None req-de3e6b67-53ae-4eae-afe5-2a6cfd5f7e07 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1405.515229] env[69784]: DEBUG nova.compute.manager [-] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1405.515335] env[69784]: DEBUG nova.network.neutron [-] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1405.530354] env[69784]: DEBUG oslo_vmware.rw_handles [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4bb0b377-0789-4d46-909d-0a0267cfae1c/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1405.586304] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1405.586553] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1405.588083] env[69784]: INFO nova.compute.claims [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1405.593691] env[69784]: DEBUG oslo_vmware.rw_handles [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Completed reading data from the image iterator. {{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1405.593855] env[69784]: DEBUG oslo_vmware.rw_handles [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4bb0b377-0789-4d46-909d-0a0267cfae1c/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1405.600066] env[69784]: DEBUG nova.network.neutron [-] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1405.611086] env[69784]: INFO nova.compute.manager [-] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] Took 0.10 seconds to deallocate network for instance. 
[ 1405.700729] env[69784]: DEBUG oslo_concurrency.lockutils [None req-de3e6b67-53ae-4eae-afe5-2a6cfd5f7e07 tempest-ListServerFiltersTestJSON-356601045 tempest-ListServerFiltersTestJSON-356601045-project-member] Lock "16edc743-b24c-4a20-9046-f5d519bd7e9a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.242s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1405.701602] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "16edc743-b24c-4a20-9046-f5d519bd7e9a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 132.810s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1405.701766] env[69784]: INFO nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 16edc743-b24c-4a20-9046-f5d519bd7e9a] During sync_power_state the instance has a pending task (deleting). Skip. [ 1405.701939] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "16edc743-b24c-4a20-9046-f5d519bd7e9a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1405.839188] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1405.860330] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bf1efe3-737d-4b28-a4b3-2bd7d7f50cd0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.868228] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-937ab5b6-61b0-46ed-b9e8-03eafc86a023 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.897887] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94bd1f7b-f3ef-406b-9921-ca5354465c46 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.904718] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fb1ecfe-a1c0-4a53-8922-2442b049ec19 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.917703] env[69784]: DEBUG nova.compute.provider_tree [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1405.926985] env[69784]: DEBUG nova.scheduler.client.report [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory 
data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1405.940198] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.354s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1405.940685] env[69784]: DEBUG nova.compute.manager [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Start building networks asynchronously for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1405.975359] env[69784]: DEBUG nova.compute.utils [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1405.975844] env[69784]: DEBUG nova.compute.manager [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Not allocating networking since 'none' was specified. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1952}} [ 1405.984034] env[69784]: DEBUG nova.compute.manager [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1406.046813] env[69784]: DEBUG nova.compute.manager [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Start spawning the instance on the hypervisor. 
{{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1406.069328] env[69784]: DEBUG nova.virt.hardware [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=<?>,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-04-16T19:58:34Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1406.069583] env[69784]: DEBUG nova.virt.hardware [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1406.069742] env[69784]: DEBUG nova.virt.hardware [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1406.069921] env[69784]: DEBUG nova.virt.hardware [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1406.070082] env[69784]: DEBUG nova.virt.hardware [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1406.070233] env[69784]: DEBUG nova.virt.hardware [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1406.070442] env[69784]: DEBUG nova.virt.hardware [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1406.070599] env[69784]: DEBUG nova.virt.hardware [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1406.070766] env[69784]: DEBUG nova.virt.hardware [None req-5429b66a-d07d-4d1a-878a-875828021b1d 
tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1406.070930] env[69784]: DEBUG nova.virt.hardware [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1406.071115] env[69784]: DEBUG nova.virt.hardware [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1406.071982] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-addb036f-49d3-40d1-aba0-7c3fbdecaf44 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.079768] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc03ef0c-25c4-4e21-a327-eb05156747e3 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.093291] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Instance VIF info [] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1406.098619] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Creating folder: Project (129f749691294e8b824eb77d753358b2). Parent ref: group-v692547. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1406.098869] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-45544eb0-5e0c-453b-bca3-69948de09507 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.107917] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Created folder: Project (129f749691294e8b824eb77d753358b2) in parent group-v692547. [ 1406.108112] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Creating folder: Instances. Parent ref: group-v692620. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1406.108313] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-418ee56c-c7aa-4ebf-82f8-a4b4e4ca93e3 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.115457] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Created folder: Instances in parent group-v692620. 
[ 1406.115684] env[69784]: DEBUG oslo.service.loopingcall [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1406.115857] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1406.116366] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b4e974ee-122a-44e6-8a30-10ddac8d3644 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.130944] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1406.130944] env[69784]: value = "task-3467135" [ 1406.130944] env[69784]: _type = "Task" [ 1406.130944] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.137590] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467135, 'name': CreateVM_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.640653] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467135, 'name': CreateVM_Task, 'duration_secs': 0.244901} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.640935] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1406.641358] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1406.641525] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1406.642124] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1406.642404] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62282a1f-2f42-4b16-ad73-bbe890499d57 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.646815] env[69784]: DEBUG oslo_vmware.api [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Waiting for the task: (returnval){ 
[ 1406.646815] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]5294ae7e-9afe-5d2e-a9b8-05fee91e2293" [ 1406.646815] env[69784]: _type = "Task" [ 1406.646815] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.654197] env[69784]: DEBUG oslo_vmware.api [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]5294ae7e-9afe-5d2e-a9b8-05fee91e2293, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.157423] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1407.157692] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1407.157915] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1407.483325] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1407.483542] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1407.483726] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Deleting the datastore file [datastore1] c23e7041-ca02-4047-84d5-84b62f36b37f {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1407.484337] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2cdc36a0-95d6-40c1-91b8-55349d843faa {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.490281] env[69784]: DEBUG oslo_vmware.api [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 
tempest-DeleteServersAdminTestJSON-1856813495-project-member] Waiting for the task: (returnval){ [ 1407.490281] env[69784]: value = "task-3467136" [ 1407.490281] env[69784]: _type = "Task" [ 1407.490281] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.497970] env[69784]: DEBUG oslo_vmware.api [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Task: {'id': task-3467136, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.000590] env[69784]: DEBUG oslo_vmware.api [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Task: {'id': task-3467136, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.087079} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.000853] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1408.001045] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1408.001225] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1408.001397] env[69784]: INFO nova.compute.manager [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Took 3.21 seconds to destroy the instance on the hypervisor. 
[ 1408.003827] env[69784]: DEBUG nova.compute.claims [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1408.004016] env[69784]: DEBUG oslo_concurrency.lockutils [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1408.004240] env[69784]: DEBUG oslo_concurrency.lockutils [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1408.253122] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f32bf28-6fef-4daa-b92e-2a7eaef4a00a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.260957] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89156cd1-5f73-497c-9bed-c84e07766697 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.290949] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c2039fa-467f-4ac4-8f61-8a027c5a1d64 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.297894] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c3a3509-2324-40d1-8f73-26ff97b6ad27 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.310921] env[69784]: DEBUG nova.compute.provider_tree [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1408.321571] env[69784]: DEBUG nova.scheduler.client.report [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1408.336950] env[69784]: DEBUG oslo_concurrency.lockutils [None 
req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.333s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1408.337662] env[69784]: ERROR nova.compute.manager [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image a83f2316-67d7-4612-bb03-1146b6453ed4. [ 1408.337662] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Traceback (most recent call last): [ 1408.337662] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1408.337662] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1408.337662] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1408.337662] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] result = getattr(controller, method)(*args, **kwargs) [ 1408.337662] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1408.337662] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] return self._get(image_id) [ 1408.337662] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1408.337662] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1408.337662] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1408.337946] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] resp, body = self.http_client.get(url, headers=header) [ 1408.337946] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 1408.337946] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] return self.request(url, 'GET', **kwargs) [ 1408.337946] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1408.337946] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] return self._handle_response(resp) [ 1408.337946] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1408.337946] 
env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] raise exc.from_response(resp, resp.content) [ 1408.337946] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1408.337946] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] [ 1408.337946] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] During handling of the above exception, another exception occurred: [ 1408.337946] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] [ 1408.337946] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Traceback (most recent call last): [ 1408.338240] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1408.338240] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] self.driver.spawn(context, instance, image_meta, [ 1408.338240] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1408.338240] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1408.338240] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1408.338240] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] self._fetch_image_if_missing(context, vi) [ 1408.338240] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1408.338240] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] image_fetch(context, vi, tmp_image_ds_loc) [ 1408.338240] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1408.338240] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] images.fetch_image( [ 1408.338240] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1408.338240] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] metadata = IMAGE_API.get(context, image_ref) [ 1408.338240] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/image/glance.py", line 1205, in get [ 1408.338572] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] return session.show(context, image_id, [ 1408.338572] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1408.338572] env[69784]: ERROR nova.compute.manager 
[instance: c23e7041-ca02-4047-84d5-84b62f36b37f] _reraise_translated_image_exception(image_id) [ 1408.338572] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/image/glance.py", line 1031, in _reraise_translated_image_exception [ 1408.338572] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] raise new_exc.with_traceback(exc_trace) [ 1408.338572] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1408.338572] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1408.338572] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1408.338572] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] result = getattr(controller, method)(*args, **kwargs) [ 1408.338572] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1408.338572] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] return self._get(image_id) [ 1408.338572] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1408.338572] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1408.338880] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1408.338880] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] resp, body = self.http_client.get(url, headers=header) [ 1408.338880] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 1408.338880] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] return self.request(url, 'GET', **kwargs) [ 1408.338880] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1408.338880] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] return self._handle_response(resp) [ 1408.338880] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1408.338880] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] raise exc.from_response(resp, resp.content) [ 1408.338880] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] nova.exception.ImageNotAuthorized: Not authorized for image a83f2316-67d7-4612-bb03-1146b6453ed4. 
[ 1408.338880] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] [ 1408.339133] env[69784]: DEBUG nova.compute.utils [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Not authorized for image a83f2316-67d7-4612-bb03-1146b6453ed4. {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1408.339723] env[69784]: DEBUG nova.compute.manager [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Build of instance c23e7041-ca02-4047-84d5-84b62f36b37f was re-scheduled: Not authorized for image a83f2316-67d7-4612-bb03-1146b6453ed4. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1408.340199] env[69784]: DEBUG nova.compute.manager [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1408.340374] env[69784]: DEBUG nova.compute.manager [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1408.340529] env[69784]: DEBUG nova.compute.manager [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1408.340690] env[69784]: DEBUG nova.network.neutron [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1408.460055] env[69784]: DEBUG neutronclient.v2_0.client [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=69784) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1408.462522] env[69784]: ERROR nova.compute.manager [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. 
[ 1408.462522] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Traceback (most recent call last): [ 1408.462522] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1408.462522] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1408.462522] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1408.462522] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] result = getattr(controller, method)(*args, **kwargs) [ 1408.462522] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1408.462522] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] return self._get(image_id) [ 1408.462522] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1408.462522] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1408.462522] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1408.462947] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] resp, body = self.http_client.get(url, headers=header) [ 1408.462947] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 1408.462947] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] return self.request(url, 'GET', **kwargs) [ 1408.462947] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1408.462947] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] return self._handle_response(resp) [ 1408.462947] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1408.462947] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] raise exc.from_response(resp, resp.content) [ 1408.462947] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1408.462947] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] [ 1408.462947] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] During handling of the above exception, another exception occurred: [ 1408.462947] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] [ 1408.462947] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Traceback (most recent call last): [ 1408.463212] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1408.463212] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] self.driver.spawn(context, instance, image_meta, [ 1408.463212] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1408.463212] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1408.463212] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1408.463212] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] self._fetch_image_if_missing(context, vi) [ 1408.463212] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1408.463212] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] image_fetch(context, vi, tmp_image_ds_loc) [ 1408.463212] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1408.463212] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] images.fetch_image( [ 1408.463212] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1408.463212] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] metadata = IMAGE_API.get(context, image_ref) [ 1408.463212] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/image/glance.py", line 1205, in get [ 1408.463579] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] return session.show(context, image_id, [ 1408.463579] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1408.463579] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] _reraise_translated_image_exception(image_id) [ 1408.463579] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/image/glance.py", line 1031, in _reraise_translated_image_exception [ 1408.463579] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] raise new_exc.with_traceback(exc_trace) [ 1408.463579] env[69784]: ERROR nova.compute.manager [instance: 
c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1408.463579] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1408.463579] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1408.463579] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] result = getattr(controller, method)(*args, **kwargs) [ 1408.463579] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1408.463579] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] return self._get(image_id) [ 1408.463579] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1408.463579] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1408.463888] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1408.463888] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] resp, body = self.http_client.get(url, headers=header) [ 1408.463888] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 1408.463888] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] return self.request(url, 'GET', **kwargs) [ 1408.463888] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1408.463888] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] return self._handle_response(resp) [ 1408.463888] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1408.463888] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] raise exc.from_response(resp, resp.content) [ 1408.463888] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] nova.exception.ImageNotAuthorized: Not authorized for image a83f2316-67d7-4612-bb03-1146b6453ed4. 
[ 1408.463888] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] [ 1408.463888] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] During handling of the above exception, another exception occurred: [ 1408.463888] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] [ 1408.463888] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Traceback (most recent call last): [ 1408.464198] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/compute/manager.py", line 2430, in _do_build_and_run_instance [ 1408.464198] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] self._build_and_run_instance(context, instance, image, [ 1408.464198] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/compute/manager.py", line 2722, in _build_and_run_instance [ 1408.464198] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] raise exception.RescheduledException( [ 1408.464198] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] nova.exception.RescheduledException: Build of instance c23e7041-ca02-4047-84d5-84b62f36b37f was re-scheduled: Not authorized for image a83f2316-67d7-4612-bb03-1146b6453ed4. [ 1408.464198] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] [ 1408.464198] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] During handling of the above exception, another exception occurred: [ 1408.464198] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] [ 1408.464198] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Traceback (most recent call last): [ 1408.464198] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1408.464198] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] ret = obj(*args, **kwargs) [ 1408.464198] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1408.464198] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] exception_handler_v20(status_code, error_body) [ 1408.464560] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1408.464560] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] raise client_exc(message=error_message, [ 1408.464560] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1408.464560] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Neutron server returns request_ids: ['req-13346b7b-185f-4d0d-a6e4-762b62cdab49'] [ 1408.464560] env[69784]: ERROR nova.compute.manager [instance: 
c23e7041-ca02-4047-84d5-84b62f36b37f] [ 1408.464560] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] During handling of the above exception, another exception occurred: [ 1408.464560] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] [ 1408.464560] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Traceback (most recent call last): [ 1408.464560] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/compute/manager.py", line 3019, in _cleanup_allocated_networks [ 1408.464560] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] self._deallocate_network(context, instance, requested_networks) [ 1408.464560] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1408.464560] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] self.network_api.deallocate_for_instance( [ 1408.464560] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1408.464945] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] data = neutron.list_ports(**search_opts) [ 1408.464945] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1408.464945] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] ret = obj(*args, **kwargs) [ 1408.464945] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1408.464945] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] return self.list('ports', self.ports_path, retrieve_all, [ 1408.464945] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1408.464945] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] ret = obj(*args, **kwargs) [ 1408.464945] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1408.464945] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] for r in self._pagination(collection, path, **params): [ 1408.464945] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1408.464945] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] res = self.get(path, params=params) [ 1408.464945] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1408.464945] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] ret = obj(*args, **kwargs) [ 1408.471328] env[69784]: ERROR nova.compute.manager [instance: 
c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1408.471328] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] return self.retry_request("GET", action, body=body, [ 1408.471328] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1408.471328] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] ret = obj(*args, **kwargs) [ 1408.471328] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1408.471328] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] return self.do_request(method, action, body=body, [ 1408.471328] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1408.471328] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] ret = obj(*args, **kwargs) [ 1408.471328] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1408.471328] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] self._handle_fault_response(status_code, replybody, resp) [ 1408.471328] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1408.471328] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] raise exception.Unauthorized() [ 1408.471328] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] nova.exception.Unauthorized: Not authorized. 
[ 1408.471641] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] [ 1408.538977] env[69784]: INFO nova.scheduler.client.report [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Deleted allocations for instance c23e7041-ca02-4047-84d5-84b62f36b37f [ 1408.569180] env[69784]: DEBUG oslo_concurrency.lockutils [None req-67be7644-1ba2-4821-a114-a1d4d720ae04 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Lock "c23e7041-ca02-4047-84d5-84b62f36b37f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 592.423s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1408.570313] env[69784]: DEBUG oslo_concurrency.lockutils [None req-ea71bd0c-9a93-4150-a2fc-717afe913bf0 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Lock "c23e7041-ca02-4047-84d5-84b62f36b37f" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 396.367s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1408.570527] env[69784]: DEBUG oslo_concurrency.lockutils [None req-ea71bd0c-9a93-4150-a2fc-717afe913bf0 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Acquiring lock "c23e7041-ca02-4047-84d5-84b62f36b37f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1408.570728] env[69784]: DEBUG oslo_concurrency.lockutils [None req-ea71bd0c-9a93-4150-a2fc-717afe913bf0 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Lock "c23e7041-ca02-4047-84d5-84b62f36b37f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1408.571768] env[69784]: DEBUG oslo_concurrency.lockutils [None req-ea71bd0c-9a93-4150-a2fc-717afe913bf0 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Lock "c23e7041-ca02-4047-84d5-84b62f36b37f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1408.572801] env[69784]: INFO nova.compute.manager [None req-ea71bd0c-9a93-4150-a2fc-717afe913bf0 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Terminating instance [ 1408.574491] env[69784]: DEBUG nova.compute.manager [None req-ea71bd0c-9a93-4150-a2fc-717afe913bf0 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Start destroying the instance on the hypervisor. 
{{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1408.574676] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-ea71bd0c-9a93-4150-a2fc-717afe913bf0 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1408.575149] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-52c355cb-4100-41c8-96bf-3d1f475898c3 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.583675] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f74f7418-2164-4e01-9780-00df68cd62ca {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.600934] env[69784]: DEBUG nova.compute.manager [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1408.612730] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-ea71bd0c-9a93-4150-a2fc-717afe913bf0 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c23e7041-ca02-4047-84d5-84b62f36b37f could not be found. [ 1408.612937] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-ea71bd0c-9a93-4150-a2fc-717afe913bf0 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1408.613081] env[69784]: INFO nova.compute.manager [None req-ea71bd0c-9a93-4150-a2fc-717afe913bf0 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1408.613324] env[69784]: DEBUG oslo.service.loopingcall [None req-ea71bd0c-9a93-4150-a2fc-717afe913bf0 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1408.613741] env[69784]: DEBUG nova.compute.manager [-] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1408.613842] env[69784]: DEBUG nova.network.neutron [-] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1408.664048] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1408.664387] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1408.666902] env[69784]: INFO nova.compute.claims [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1408.734778] env[69784]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=69784) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1408.735062] env[69784]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1408.735812] env[69784]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1408.735812] env[69784]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1408.735812] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1408.735812] env[69784]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1408.735812] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1408.735812] env[69784]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1408.735812] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1408.735812] env[69784]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1408.735812] env[69784]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1408.735812] env[69784]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-0c8170fe-8789-4a6a-8758-676e61297651'] [ 1408.735812] env[69784]: ERROR oslo.service.loopingcall [ 1408.735812] env[69784]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1408.735812] env[69784]: ERROR oslo.service.loopingcall [ 1408.735812] env[69784]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1408.735812] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1408.735812] env[69784]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1408.736275] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1408.736275] env[69784]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1408.736275] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 1408.736275] env[69784]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1408.736275] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1408.736275] env[69784]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1408.736275] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1408.736275] env[69784]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1408.736275] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1408.736275] env[69784]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1408.736275] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1408.736275] env[69784]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1408.736275] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1408.736275] env[69784]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1408.736275] env[69784]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1408.736275] env[69784]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1408.736275] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1408.736275] env[69784]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1408.736903] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1408.736903] env[69784]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1408.736903] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1408.736903] env[69784]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1408.736903] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1408.736903] env[69784]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1408.736903] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1408.736903] env[69784]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1408.736903] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1408.736903] env[69784]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1408.736903] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1408.736903] env[69784]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1408.736903] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1408.736903] env[69784]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1408.736903] env[69784]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1408.736903] env[69784]: ERROR oslo.service.loopingcall [ 1408.737309] env[69784]: ERROR nova.compute.manager [None req-ea71bd0c-9a93-4150-a2fc-717afe913bf0 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1408.766475] env[69784]: ERROR nova.compute.manager [None req-ea71bd0c-9a93-4150-a2fc-717afe913bf0 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1408.766475] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Traceback (most recent call last): [ 1408.766475] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1408.766475] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] ret = obj(*args, **kwargs) [ 1408.766475] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1408.766475] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] exception_handler_v20(status_code, error_body) [ 1408.766475] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1408.766475] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] raise client_exc(message=error_message, [ 1408.766475] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1408.766475] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Neutron server returns request_ids: ['req-0c8170fe-8789-4a6a-8758-676e61297651'] [ 1408.767509] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] [ 1408.767509] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] During handling of the above exception, another exception occurred: [ 1408.767509] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] [ 1408.767509] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Traceback (most recent call last): [ 1408.767509] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/compute/manager.py", line 3315, in do_terminate_instance [ 1408.767509] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] self._delete_instance(context, instance, bdms) [ 1408.767509] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/compute/manager.py", line 3250, in _delete_instance [ 1408.767509] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] self._shutdown_instance(context, instance, bdms) [ 1408.767509] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/compute/manager.py", line 3144, in _shutdown_instance [ 1408.767509] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] self._try_deallocate_network(context, instance, requested_networks) [ 1408.767509] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/compute/manager.py", line 3058, in _try_deallocate_network [ 1408.767509] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] with excutils.save_and_reraise_exception(): [ 1408.767509] env[69784]: ERROR 
nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1408.767509] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] self.force_reraise() [ 1408.767793] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1408.767793] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] raise self.value [ 1408.767793] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/compute/manager.py", line 3056, in _try_deallocate_network [ 1408.767793] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] _deallocate_network_with_retries() [ 1408.767793] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1408.767793] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] return evt.wait() [ 1408.767793] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1408.767793] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] result = hub.switch() [ 1408.767793] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1408.767793] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] return self.greenlet.switch() [ 1408.767793] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1408.767793] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] result = func(*self.args, **self.kw) [ 1408.768052] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1408.768052] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] result = f(*args, **kwargs) [ 1408.768052] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 1408.768052] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] self._deallocate_network( [ 1408.768052] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1408.768052] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] self.network_api.deallocate_for_instance( [ 1408.768052] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1408.768052] env[69784]: ERROR nova.compute.manager [instance: 
c23e7041-ca02-4047-84d5-84b62f36b37f] data = neutron.list_ports(**search_opts) [ 1408.768052] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1408.768052] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] ret = obj(*args, **kwargs) [ 1408.768052] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1408.768052] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] return self.list('ports', self.ports_path, retrieve_all, [ 1408.768052] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1408.768358] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] ret = obj(*args, **kwargs) [ 1408.768358] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1408.768358] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] for r in self._pagination(collection, path, **params): [ 1408.768358] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1408.768358] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] res = self.get(path, params=params) [ 1408.768358] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1408.768358] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] ret = obj(*args, **kwargs) [ 1408.768358] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1408.768358] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] return self.retry_request("GET", action, body=body, [ 1408.768358] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1408.768358] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] ret = obj(*args, **kwargs) [ 1408.768358] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1408.768358] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] return self.do_request(method, action, body=body, [ 1408.768637] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1408.768637] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] ret = obj(*args, **kwargs) [ 1408.768637] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1408.768637] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] self._handle_fault_response(status_code, replybody, resp) [ 1408.768637] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1408.768637] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1408.768637] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1408.768637] env[69784]: ERROR nova.compute.manager [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] [ 1408.796430] env[69784]: DEBUG oslo_concurrency.lockutils [None req-ea71bd0c-9a93-4150-a2fc-717afe913bf0 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Lock "c23e7041-ca02-4047-84d5-84b62f36b37f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.226s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1408.797450] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "c23e7041-ca02-4047-84d5-84b62f36b37f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 135.905s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1408.797634] env[69784]: INFO nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] During sync_power_state the instance has a pending task (deleting). Skip. [ 1408.797808] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "c23e7041-ca02-4047-84d5-84b62f36b37f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1408.860534] env[69784]: INFO nova.compute.manager [None req-ea71bd0c-9a93-4150-a2fc-717afe913bf0 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] [instance: c23e7041-ca02-4047-84d5-84b62f36b37f] Successfully reverted task state from None on failure for instance. [ 1408.864113] env[69784]: ERROR oslo_messaging.rpc.server [None req-ea71bd0c-9a93-4150-a2fc-717afe913bf0 tempest-DeleteServersAdminTestJSON-1856813495 tempest-DeleteServersAdminTestJSON-1856813495-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1408.864113] env[69784]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1408.864113] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1408.864113] env[69784]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1408.864113] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1408.864113] env[69784]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1408.864113] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1408.864113] env[69784]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1408.864113] env[69784]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1408.864113] env[69784]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-0c8170fe-8789-4a6a-8758-676e61297651'] [ 1408.864113] env[69784]: ERROR oslo_messaging.rpc.server [ 1408.864113] env[69784]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1408.864113] env[69784]: ERROR oslo_messaging.rpc.server [ 1408.864113] env[69784]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1408.864113] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1408.864518] env[69784]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1408.864518] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1408.864518] env[69784]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1408.864518] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1408.864518] env[69784]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1408.864518] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1408.864518] env[69784]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1408.864518] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1408.864518] env[69784]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1408.864518] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1408.864518] env[69784]: ERROR oslo_messaging.rpc.server raise self.value [ 1408.864518] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1408.864518] env[69784]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1408.864518] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1408.864518] env[69784]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1408.864518] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1408.864518] env[69784]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1408.864518] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1408.864898] env[69784]: ERROR oslo_messaging.rpc.server raise self.value [ 1408.864898] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1408.864898] env[69784]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1408.864898] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1439, in decorated_function [ 1408.864898] env[69784]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1408.864898] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1408.864898] env[69784]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1408.864898] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1408.864898] env[69784]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1408.864898] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1408.864898] env[69784]: ERROR oslo_messaging.rpc.server raise self.value [ 1408.864898] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1408.864898] env[69784]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1408.864898] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3327, in terminate_instance [ 1408.864898] env[69784]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1408.864898] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 414, in inner [ 1408.864898] env[69784]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1408.864898] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3322, in do_terminate_instance [ 1408.865310] env[69784]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1408.865310] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1408.865310] env[69784]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1408.865310] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1408.865310] env[69784]: ERROR oslo_messaging.rpc.server raise self.value [ 1408.865310] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3315, in do_terminate_instance [ 1408.865310] env[69784]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1408.865310] env[69784]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3250, in _delete_instance [ 1408.865310] env[69784]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1408.865310] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3144, in _shutdown_instance [ 1408.865310] env[69784]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1408.865310] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3058, in _try_deallocate_network [ 1408.865310] env[69784]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1408.865310] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1408.865310] env[69784]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1408.865310] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1408.865310] env[69784]: ERROR oslo_messaging.rpc.server raise self.value [ 1408.865310] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3056, in _try_deallocate_network [ 1408.865758] env[69784]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1408.865758] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1408.865758] env[69784]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1408.865758] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1408.865758] env[69784]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1408.865758] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1408.865758] env[69784]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1408.865758] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1408.865758] env[69784]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1408.865758] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1408.865758] env[69784]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1408.865758] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 1408.865758] env[69784]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1408.865758] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1408.865758] env[69784]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1408.865758] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1408.865758] env[69784]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1408.865758] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1408.866199] env[69784]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1408.866199] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1408.866199] env[69784]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1408.866199] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1408.866199] env[69784]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1408.866199] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1408.866199] env[69784]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1408.866199] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1408.866199] env[69784]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1408.866199] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1408.866199] env[69784]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1408.866199] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1408.866199] env[69784]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1408.866199] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1408.866199] env[69784]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1408.866199] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1408.866199] env[69784]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1408.866199] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1408.866687] env[69784]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1408.866687] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1408.866687] env[69784]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1408.866687] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1408.866687] env[69784]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1408.866687] env[69784]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1408.866687] env[69784]: ERROR oslo_messaging.rpc.server [ 1408.925054] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-746b1977-f515-4901-bd8f-003f019c0071 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.932374] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-437627e1-4945-4cee-b27b-dceccfdaa1f8 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.961777] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da0ec0f5-dae3-48f5-89ab-402c30e9e095 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.968669] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4d03fed-1edf-4980-a954-593adf0f1400 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.981401] env[69784]: DEBUG nova.compute.provider_tree [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1408.989690] env[69784]: DEBUG nova.scheduler.client.report [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1409.010885] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.346s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1409.011394] env[69784]: DEBUG nova.compute.manager [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Start building networks asynchronously for instance. 
{{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1409.045876] env[69784]: DEBUG nova.compute.utils [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1409.046718] env[69784]: DEBUG nova.compute.manager [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1409.046875] env[69784]: DEBUG nova.network.neutron [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1409.056751] env[69784]: DEBUG nova.compute.manager [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1409.127530] env[69784]: DEBUG nova.compute.manager [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Start spawning the instance on the hypervisor. 
{{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1409.134347] env[69784]: DEBUG nova.policy [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '37964134670041448aded713253aafca', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'db837a27374b45328a331b77d7403e47', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 1409.172962] env[69784]: DEBUG nova.virt.hardware [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1409.173253] env[69784]: DEBUG nova.virt.hardware [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1409.173386] env[69784]: DEBUG nova.virt.hardware [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1409.173578] env[69784]: DEBUG nova.virt.hardware [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1409.173720] env[69784]: DEBUG nova.virt.hardware [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1409.173860] env[69784]: DEBUG nova.virt.hardware [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1409.174076] env[69784]: 
DEBUG nova.virt.hardware [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1409.174237] env[69784]: DEBUG nova.virt.hardware [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1409.174399] env[69784]: DEBUG nova.virt.hardware [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1409.174589] env[69784]: DEBUG nova.virt.hardware [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1409.174792] env[69784]: DEBUG nova.virt.hardware [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1409.175729] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b00ff11d-c1c9-4320-a7e8-ee05099cf416 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.184159] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae65b897-245c-426c-841c-9fc56c23aab3 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.594705] env[69784]: DEBUG nova.network.neutron [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Successfully created port: 15ee2fe1-6beb-4182-ad10-b534abaa7529 {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1410.277703] env[69784]: DEBUG nova.compute.manager [req-04ea0616-9036-4cb1-a806-92322b0caf56 req-549bd052-8b9f-4fa7-8096-a9cfb77834a9 service nova] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Received event network-vif-plugged-15ee2fe1-6beb-4182-ad10-b534abaa7529 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1410.277968] env[69784]: DEBUG oslo_concurrency.lockutils [req-04ea0616-9036-4cb1-a806-92322b0caf56 req-549bd052-8b9f-4fa7-8096-a9cfb77834a9 service nova] Acquiring lock "161991fb-77d5-4a18-b0f3-d2346c8d3b68-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1410.278128] env[69784]: DEBUG oslo_concurrency.lockutils [req-04ea0616-9036-4cb1-a806-92322b0caf56 
req-549bd052-8b9f-4fa7-8096-a9cfb77834a9 service nova] Lock "161991fb-77d5-4a18-b0f3-d2346c8d3b68-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1410.278302] env[69784]: DEBUG oslo_concurrency.lockutils [req-04ea0616-9036-4cb1-a806-92322b0caf56 req-549bd052-8b9f-4fa7-8096-a9cfb77834a9 service nova] Lock "161991fb-77d5-4a18-b0f3-d2346c8d3b68-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1410.278475] env[69784]: DEBUG nova.compute.manager [req-04ea0616-9036-4cb1-a806-92322b0caf56 req-549bd052-8b9f-4fa7-8096-a9cfb77834a9 service nova] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] No waiting events found dispatching network-vif-plugged-15ee2fe1-6beb-4182-ad10-b534abaa7529 {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1410.278641] env[69784]: WARNING nova.compute.manager [req-04ea0616-9036-4cb1-a806-92322b0caf56 req-549bd052-8b9f-4fa7-8096-a9cfb77834a9 service nova] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Received unexpected event network-vif-plugged-15ee2fe1-6beb-4182-ad10-b534abaa7529 for instance with vm_state building and task_state spawning. [ 1410.341571] env[69784]: DEBUG nova.network.neutron [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Successfully updated port: 15ee2fe1-6beb-4182-ad10-b534abaa7529 {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1410.365275] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Acquiring lock "refresh_cache-161991fb-77d5-4a18-b0f3-d2346c8d3b68" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1410.365464] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Acquired lock "refresh_cache-161991fb-77d5-4a18-b0f3-d2346c8d3b68" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1410.365598] env[69784]: DEBUG nova.network.neutron [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1410.399086] env[69784]: DEBUG nova.network.neutron [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Instance cache missing network info. 
{{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1410.580949] env[69784]: DEBUG nova.network.neutron [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Updating instance_info_cache with network_info: [{"id": "15ee2fe1-6beb-4182-ad10-b534abaa7529", "address": "fa:16:3e:69:89:5b", "network": {"id": "4b99708c-7cc0-4fec-a1a0-a340be15f532", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-2118472796-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "db837a27374b45328a331b77d7403e47", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15ee2fe1-6b", "ovs_interfaceid": "15ee2fe1-6beb-4182-ad10-b534abaa7529", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1410.593659] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Releasing lock "refresh_cache-161991fb-77d5-4a18-b0f3-d2346c8d3b68" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1410.593782] env[69784]: DEBUG nova.compute.manager [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Instance network_info: |[{"id": "15ee2fe1-6beb-4182-ad10-b534abaa7529", "address": "fa:16:3e:69:89:5b", "network": {"id": "4b99708c-7cc0-4fec-a1a0-a340be15f532", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-2118472796-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "db837a27374b45328a331b77d7403e47", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15ee2fe1-6b", "ovs_interfaceid": "15ee2fe1-6beb-4182-ad10-b534abaa7529", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1971}} [ 1410.594158] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:69:89:5b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6f41e4aa-0d23-48c4-a359-574abb2e7b9a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '15ee2fe1-6beb-4182-ad10-b534abaa7529', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1410.601493] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Creating folder: Project (db837a27374b45328a331b77d7403e47). Parent ref: group-v692547. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1410.601988] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-caae91fc-e814-4a1b-80ad-e919bc87aaee {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.612510] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Created folder: Project (db837a27374b45328a331b77d7403e47) in parent group-v692547. [ 1410.612693] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Creating folder: Instances. Parent ref: group-v692623. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1410.612901] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2d00be08-049c-4ca3-954b-412fdae41978 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.621615] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Created folder: Instances in parent group-v692623. [ 1410.621827] env[69784]: DEBUG oslo.service.loopingcall [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1410.621999] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1410.622199] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-05fdde74-a8dd-4977-b6ff-5048d7435756 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.640770] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1410.640770] env[69784]: value = "task-3467139" [ 1410.640770] env[69784]: _type = "Task" [ 1410.640770] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.647745] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467139, 'name': CreateVM_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.150516] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467139, 'name': CreateVM_Task, 'duration_secs': 0.434261} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.150727] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1411.151433] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1411.151608] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1411.151918] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1411.152182] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-807309d6-cd52-43ad-ae50-c9f871faf9d8 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.156321] env[69784]: DEBUG oslo_vmware.api [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Waiting for the task: (returnval){ [ 1411.156321] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52ec5bcc-dc95-bb14-1bf0-516e75a4fb08" [ 1411.156321] env[69784]: _type = "Task" [ 1411.156321] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.163477] env[69784]: DEBUG oslo_vmware.api [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52ec5bcc-dc95-bb14-1bf0-516e75a4fb08, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.668116] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1411.668399] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1411.668565] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1412.258233] env[69784]: DEBUG oslo_concurrency.lockutils [None req-58747662-4f50-4a3c-a26b-c0576d919e86 tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Acquiring lock "1473585c-f194-4396-b568-e8c1bc6d049b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1412.335971] env[69784]: DEBUG nova.compute.manager [req-fcd7bfa0-7190-455b-a18e-250bc90ffbeb req-341dd44f-b7ce-491f-851c-ef815f706792 service nova] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Received event network-changed-15ee2fe1-6beb-4182-ad10-b534abaa7529 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1412.336178] env[69784]: DEBUG nova.compute.manager [req-fcd7bfa0-7190-455b-a18e-250bc90ffbeb req-341dd44f-b7ce-491f-851c-ef815f706792 service nova] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Refreshing instance network info cache due to event network-changed-15ee2fe1-6beb-4182-ad10-b534abaa7529. 
{{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1412.336386] env[69784]: DEBUG oslo_concurrency.lockutils [req-fcd7bfa0-7190-455b-a18e-250bc90ffbeb req-341dd44f-b7ce-491f-851c-ef815f706792 service nova] Acquiring lock "refresh_cache-161991fb-77d5-4a18-b0f3-d2346c8d3b68" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1412.336559] env[69784]: DEBUG oslo_concurrency.lockutils [req-fcd7bfa0-7190-455b-a18e-250bc90ffbeb req-341dd44f-b7ce-491f-851c-ef815f706792 service nova] Acquired lock "refresh_cache-161991fb-77d5-4a18-b0f3-d2346c8d3b68" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1412.336728] env[69784]: DEBUG nova.network.neutron [req-fcd7bfa0-7190-455b-a18e-250bc90ffbeb req-341dd44f-b7ce-491f-851c-ef815f706792 service nova] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Refreshing network info cache for port 15ee2fe1-6beb-4182-ad10-b534abaa7529 {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1412.651448] env[69784]: DEBUG nova.network.neutron [req-fcd7bfa0-7190-455b-a18e-250bc90ffbeb req-341dd44f-b7ce-491f-851c-ef815f706792 service nova] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Updated VIF entry in instance network info cache for port 15ee2fe1-6beb-4182-ad10-b534abaa7529. {{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1412.651810] env[69784]: DEBUG nova.network.neutron [req-fcd7bfa0-7190-455b-a18e-250bc90ffbeb req-341dd44f-b7ce-491f-851c-ef815f706792 service nova] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Updating instance_info_cache with network_info: [{"id": "15ee2fe1-6beb-4182-ad10-b534abaa7529", "address": "fa:16:3e:69:89:5b", "network": {"id": "4b99708c-7cc0-4fec-a1a0-a340be15f532", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-2118472796-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "db837a27374b45328a331b77d7403e47", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15ee2fe1-6b", "ovs_interfaceid": "15ee2fe1-6beb-4182-ad10-b534abaa7529", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1412.664108] env[69784]: DEBUG oslo_concurrency.lockutils [req-fcd7bfa0-7190-455b-a18e-250bc90ffbeb req-341dd44f-b7ce-491f-851c-ef815f706792 service nova] Releasing lock "refresh_cache-161991fb-77d5-4a18-b0f3-d2346c8d3b68" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1412.671872] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c99ed6cd-b041-4b5e-b846-7e23ee4c777b tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Acquiring 
lock "697cd7aa-d710-4e46-b241-085961a8631d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1413.424057] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e20bfd17-3224-46ea-8a5c-ab11eb6a6493 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Acquiring lock "161991fb-77d5-4a18-b0f3-d2346c8d3b68" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1455.631556] env[69784]: WARNING oslo_vmware.rw_handles [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1455.631556] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1455.631556] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1455.631556] env[69784]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1455.631556] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1455.631556] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 1455.631556] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1455.631556] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1455.631556] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1455.631556] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1455.631556] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1455.631556] env[69784]: ERROR oslo_vmware.rw_handles [ 1455.632193] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/4bb0b377-0789-4d46-909d-0a0267cfae1c/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1455.634212] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1455.634477] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Copying Virtual Disk [datastore1] vmware_temp/4bb0b377-0789-4d46-909d-0a0267cfae1c/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] 
vmware_temp/4bb0b377-0789-4d46-909d-0a0267cfae1c/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1455.634797] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dfdd1fd0-a0be-495d-a5fa-7e876135333e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.642965] env[69784]: DEBUG oslo_vmware.api [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Waiting for the task: (returnval){ [ 1455.642965] env[69784]: value = "task-3467140" [ 1455.642965] env[69784]: _type = "Task" [ 1455.642965] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.650848] env[69784]: DEBUG oslo_vmware.api [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Task: {'id': task-3467140, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.154913] env[69784]: DEBUG oslo_vmware.exceptions [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Fault InvalidArgument not matched. {{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1456.155239] env[69784]: DEBUG oslo_concurrency.lockutils [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1456.155809] env[69784]: ERROR nova.compute.manager [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1456.155809] env[69784]: Faults: ['InvalidArgument'] [ 1456.155809] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Traceback (most recent call last): [ 1456.155809] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1456.155809] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] yield resources [ 1456.155809] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1456.155809] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] self.driver.spawn(context, instance, image_meta, [ 1456.155809] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in 
spawn [ 1456.155809] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1456.155809] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1456.155809] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] self._fetch_image_if_missing(context, vi) [ 1456.155809] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1456.156565] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] image_cache(vi, tmp_image_ds_loc) [ 1456.156565] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1456.156565] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] vm_util.copy_virtual_disk( [ 1456.156565] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1456.156565] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] session._wait_for_task(vmdk_copy_task) [ 1456.156565] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1456.156565] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] return self.wait_for_task(task_ref) [ 1456.156565] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1456.156565] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] return evt.wait() [ 1456.156565] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1456.156565] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] result = hub.switch() [ 1456.156565] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1456.156565] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] return self.greenlet.switch() [ 1456.156928] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1456.156928] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] self.f(*self.args, **self.kw) [ 1456.156928] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1456.156928] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] raise exceptions.translate_fault(task_info.error) [ 1456.156928] env[69784]: ERROR nova.compute.manager [instance: 
7a640743-734e-4dc0-a965-0a71dddfb918] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1456.156928] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Faults: ['InvalidArgument'] [ 1456.156928] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] [ 1456.156928] env[69784]: INFO nova.compute.manager [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Terminating instance [ 1456.157760] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1456.158655] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1456.158919] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-404c4858-22c8-40c6-8996-f0093697d763 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.161131] env[69784]: DEBUG nova.compute.manager [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Start destroying the instance on the hypervisor. 
{{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1456.161328] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1456.162050] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e523876-3ff3-4799-8b90-ba968001b1b8 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.168889] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1456.169119] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0be4da72-8057-4003-ac9d-5dad60cfa994 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.171324] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1456.171472] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1456.172515] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6226d987-673b-4083-a09f-940d006dda2d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.177126] env[69784]: DEBUG oslo_vmware.api [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Waiting for the task: (returnval){ [ 1456.177126] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52bf83ec-4e10-8a09-7290-6a34cb0e7cef" [ 1456.177126] env[69784]: _type = "Task" [ 1456.177126] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.184480] env[69784]: DEBUG oslo_vmware.api [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52bf83ec-4e10-8a09-7290-6a34cb0e7cef, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.238082] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1456.238352] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1456.238546] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Deleting the datastore file [datastore1] 7a640743-734e-4dc0-a965-0a71dddfb918 {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1456.238810] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ba84275e-971e-4935-8adc-5f89281b0a37 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.244974] env[69784]: DEBUG oslo_vmware.api [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Waiting for the task: (returnval){ [ 1456.244974] env[69784]: value = "task-3467142" [ 1456.244974] env[69784]: _type = "Task" [ 1456.244974] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.254567] env[69784]: DEBUG oslo_vmware.api [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Task: {'id': task-3467142, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.688043] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1456.688411] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Creating directory with path [datastore1] vmware_temp/7e879a57-1abb-4d47-a041-30fcc176b89a/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1456.688634] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eac521ea-9b9e-4c12-a969-945a993b07a8 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.701013] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Created directory with path [datastore1] vmware_temp/7e879a57-1abb-4d47-a041-30fcc176b89a/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1456.701214] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Fetch image to [datastore1] vmware_temp/7e879a57-1abb-4d47-a041-30fcc176b89a/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1456.701385] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/7e879a57-1abb-4d47-a041-30fcc176b89a/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1456.702112] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18b8dc7e-e759-4c5e-a1af-d9efd2b8b23e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.708564] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c018a283-8bad-46c6-be87-eb5785bfda3a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.717541] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0144f2ad-8869-445b-b607-859463e020f9 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.750210] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-df9dfc9c-c433-455d-99ab-03422ae0a0f3 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.757085] env[69784]: DEBUG oslo_vmware.api [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Task: {'id': task-3467142, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078152} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.758481] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1456.758671] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1456.758844] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1456.759035] env[69784]: INFO nova.compute.manager [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1456.760776] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-29b05cb6-beac-450a-a820-04783319dc58 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.762652] env[69784]: DEBUG nova.compute.claims [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1456.762820] env[69784]: DEBUG oslo_concurrency.lockutils [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1456.763042] env[69784]: DEBUG oslo_concurrency.lockutils [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1456.788089] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1456.839561] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1456.843814] env[69784]: DEBUG oslo_vmware.rw_handles [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7e879a57-1abb-4d47-a041-30fcc176b89a/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1456.902966] env[69784]: DEBUG oslo_vmware.rw_handles [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Completed reading data from the image iterator. 
{{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1456.903171] env[69784]: DEBUG oslo_vmware.rw_handles [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7e879a57-1abb-4d47-a041-30fcc176b89a/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1457.055071] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d33b039-71ee-4a73-8f9b-e0934aeb7696 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.062692] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d7a8869-1620-48eb-9b3f-4951cda7bc14 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.091512] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-207a20c8-1369-4243-ade0-f29e79faf7f6 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.098447] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8785d102-804a-436b-ae7f-5b5327285173 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.111007] env[69784]: DEBUG nova.compute.provider_tree [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1457.122973] env[69784]: DEBUG nova.scheduler.client.report [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1457.137752] env[69784]: DEBUG oslo_concurrency.lockutils [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.374s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1457.138336] env[69784]: ERROR nova.compute.manager [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] 
[instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1457.138336] env[69784]: Faults: ['InvalidArgument'] [ 1457.138336] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Traceback (most recent call last): [ 1457.138336] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1457.138336] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] self.driver.spawn(context, instance, image_meta, [ 1457.138336] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1457.138336] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1457.138336] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1457.138336] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] self._fetch_image_if_missing(context, vi) [ 1457.138336] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1457.138336] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] image_cache(vi, tmp_image_ds_loc) [ 1457.138336] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1457.138638] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] vm_util.copy_virtual_disk( [ 1457.138638] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1457.138638] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] session._wait_for_task(vmdk_copy_task) [ 1457.138638] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1457.138638] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] return self.wait_for_task(task_ref) [ 1457.138638] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1457.138638] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] return evt.wait() [ 1457.138638] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1457.138638] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] result = hub.switch() [ 1457.138638] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1457.138638] 
env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] return self.greenlet.switch() [ 1457.138638] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1457.138638] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] self.f(*self.args, **self.kw) [ 1457.138912] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1457.138912] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] raise exceptions.translate_fault(task_info.error) [ 1457.138912] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1457.138912] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Faults: ['InvalidArgument'] [ 1457.138912] env[69784]: ERROR nova.compute.manager [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] [ 1457.139045] env[69784]: DEBUG nova.compute.utils [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1457.140475] env[69784]: DEBUG nova.compute.manager [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Build of instance 7a640743-734e-4dc0-a965-0a71dddfb918 was re-scheduled: A specified parameter was not correct: fileType [ 1457.140475] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1457.140885] env[69784]: DEBUG nova.compute.manager [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1457.141076] env[69784]: DEBUG nova.compute.manager [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1457.141235] env[69784]: DEBUG nova.compute.manager [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1457.141397] env[69784]: DEBUG nova.network.neutron [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1457.571725] env[69784]: DEBUG nova.network.neutron [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1457.584016] env[69784]: INFO nova.compute.manager [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Took 0.44 seconds to deallocate network for instance. [ 1457.684337] env[69784]: INFO nova.scheduler.client.report [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Deleted allocations for instance 7a640743-734e-4dc0-a965-0a71dddfb918 [ 1457.704340] env[69784]: DEBUG oslo_concurrency.lockutils [None req-79030d59-63e4-45ed-919b-28b3a5937515 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Lock "7a640743-734e-4dc0-a965-0a71dddfb918" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 590.591s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1457.705441] env[69784]: DEBUG oslo_concurrency.lockutils [None req-516c03ac-39b6-4cb5-9b6d-4fe4ea5bd8e1 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Lock "7a640743-734e-4dc0-a965-0a71dddfb918" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 395.228s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1457.705658] env[69784]: DEBUG oslo_concurrency.lockutils [None req-516c03ac-39b6-4cb5-9b6d-4fe4ea5bd8e1 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Acquiring lock "7a640743-734e-4dc0-a965-0a71dddfb918-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1457.705862] env[69784]: DEBUG oslo_concurrency.lockutils [None req-516c03ac-39b6-4cb5-9b6d-4fe4ea5bd8e1 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Lock 
"7a640743-734e-4dc0-a965-0a71dddfb918-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1457.706096] env[69784]: DEBUG oslo_concurrency.lockutils [None req-516c03ac-39b6-4cb5-9b6d-4fe4ea5bd8e1 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Lock "7a640743-734e-4dc0-a965-0a71dddfb918-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1457.708036] env[69784]: INFO nova.compute.manager [None req-516c03ac-39b6-4cb5-9b6d-4fe4ea5bd8e1 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Terminating instance [ 1457.709635] env[69784]: DEBUG nova.compute.manager [None req-516c03ac-39b6-4cb5-9b6d-4fe4ea5bd8e1 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1457.709825] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-516c03ac-39b6-4cb5-9b6d-4fe4ea5bd8e1 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1457.710319] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-81d373bc-ece6-45a2-8053-71ef298fca57 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.716230] env[69784]: DEBUG nova.compute.manager [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1457.722322] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-243ecbc6-e269-42e3-b791-8baa68dfa9ca {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.754434] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-516c03ac-39b6-4cb5-9b6d-4fe4ea5bd8e1 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7a640743-734e-4dc0-a965-0a71dddfb918 could not be found. 
[ 1457.754643] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-516c03ac-39b6-4cb5-9b6d-4fe4ea5bd8e1 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1457.754825] env[69784]: INFO nova.compute.manager [None req-516c03ac-39b6-4cb5-9b6d-4fe4ea5bd8e1 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1457.755087] env[69784]: DEBUG oslo.service.loopingcall [None req-516c03ac-39b6-4cb5-9b6d-4fe4ea5bd8e1 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1457.759475] env[69784]: DEBUG nova.compute.manager [-] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1457.759580] env[69784]: DEBUG nova.network.neutron [-] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1457.772769] env[69784]: DEBUG oslo_concurrency.lockutils [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1457.773043] env[69784]: DEBUG oslo_concurrency.lockutils [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1457.774521] env[69784]: INFO nova.compute.claims [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1457.784124] env[69784]: DEBUG nova.network.neutron [-] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1457.795198] env[69784]: INFO nova.compute.manager [-] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] Took 0.04 seconds to deallocate network for instance. 
[ 1457.839113] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1457.841568] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1457.841721] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69784) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1457.889493] env[69784]: DEBUG oslo_concurrency.lockutils [None req-516c03ac-39b6-4cb5-9b6d-4fe4ea5bd8e1 tempest-ServersAdminNegativeTestJSON-1592925029 tempest-ServersAdminNegativeTestJSON-1592925029-project-member] Lock "7a640743-734e-4dc0-a965-0a71dddfb918" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.184s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1457.890352] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "7a640743-734e-4dc0-a965-0a71dddfb918" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 184.998s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1457.890537] env[69784]: INFO nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 7a640743-734e-4dc0-a965-0a71dddfb918] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1457.890796] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "7a640743-734e-4dc0-a965-0a71dddfb918" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1458.026422] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-966450f3-b14d-4121-bbaf-0c595aca487e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.034202] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a83d0ab7-6848-4a24-9bb8-1ae7851b651f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.767803] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7141e57e-448d-4b2e-808e-562ac0ce4814 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.775524] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-415b356b-962e-4e81-af18-277143575171 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.788392] env[69784]: DEBUG nova.compute.provider_tree [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1458.798429] env[69784]: DEBUG nova.scheduler.client.report [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1458.813240] env[69784]: DEBUG oslo_concurrency.lockutils [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.040s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1458.813715] env[69784]: DEBUG nova.compute.manager [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Start building networks asynchronously for instance. 
{{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1458.844878] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1458.850831] env[69784]: DEBUG nova.compute.utils [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1458.852081] env[69784]: DEBUG nova.compute.manager [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1458.852250] env[69784]: DEBUG nova.network.neutron [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1458.860157] env[69784]: DEBUG nova.compute.manager [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1458.947318] env[69784]: DEBUG nova.compute.manager [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Start spawning the instance on the hypervisor. 
{{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1458.954152] env[69784]: DEBUG nova.policy [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '65818d4594bf40a8ae5562292e4a3afd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a22dfedb0c914a08bb54f5599a59aec9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 1458.972329] env[69784]: DEBUG nova.virt.hardware [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1458.972538] env[69784]: DEBUG nova.virt.hardware [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1458.972691] env[69784]: DEBUG nova.virt.hardware [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1458.972868] env[69784]: DEBUG nova.virt.hardware [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1458.973013] env[69784]: DEBUG nova.virt.hardware [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1458.973165] env[69784]: DEBUG nova.virt.hardware [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1458.973365] 
env[69784]: DEBUG nova.virt.hardware [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1458.973519] env[69784]: DEBUG nova.virt.hardware [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1458.973681] env[69784]: DEBUG nova.virt.hardware [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1458.973858] env[69784]: DEBUG nova.virt.hardware [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1458.974056] env[69784]: DEBUG nova.virt.hardware [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1458.974883] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-248ae4e5-e4bd-49e7-9e3b-a6ac617b47cf {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.982927] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f29c9430-824f-4191-bec1-9231b7e91694 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.336450] env[69784]: DEBUG nova.network.neutron [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Successfully created port: a4a8ce74-f67d-447a-b898-d10dd445a39f {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1459.975303] env[69784]: DEBUG nova.compute.manager [req-ea9b8bf1-6f34-459c-8547-2d31637ddf14 req-76b7664f-2413-4d3a-979b-be9244c97ba2 service nova] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Received event network-vif-plugged-a4a8ce74-f67d-447a-b898-d10dd445a39f {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1459.975557] env[69784]: DEBUG oslo_concurrency.lockutils [req-ea9b8bf1-6f34-459c-8547-2d31637ddf14 req-76b7664f-2413-4d3a-979b-be9244c97ba2 service nova] Acquiring lock "6cdbefb3-90e0-4ea5-b41f-a094723dbdbe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1459.975739] env[69784]: DEBUG oslo_concurrency.lockutils 
[req-ea9b8bf1-6f34-459c-8547-2d31637ddf14 req-76b7664f-2413-4d3a-979b-be9244c97ba2 service nova] Lock "6cdbefb3-90e0-4ea5-b41f-a094723dbdbe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1459.975939] env[69784]: DEBUG oslo_concurrency.lockutils [req-ea9b8bf1-6f34-459c-8547-2d31637ddf14 req-76b7664f-2413-4d3a-979b-be9244c97ba2 service nova] Lock "6cdbefb3-90e0-4ea5-b41f-a094723dbdbe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1459.976226] env[69784]: DEBUG nova.compute.manager [req-ea9b8bf1-6f34-459c-8547-2d31637ddf14 req-76b7664f-2413-4d3a-979b-be9244c97ba2 service nova] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] No waiting events found dispatching network-vif-plugged-a4a8ce74-f67d-447a-b898-d10dd445a39f {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1459.976412] env[69784]: WARNING nova.compute.manager [req-ea9b8bf1-6f34-459c-8547-2d31637ddf14 req-76b7664f-2413-4d3a-979b-be9244c97ba2 service nova] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Received unexpected event network-vif-plugged-a4a8ce74-f67d-447a-b898-d10dd445a39f for instance with vm_state building and task_state spawning. [ 1460.052016] env[69784]: DEBUG nova.network.neutron [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Successfully updated port: a4a8ce74-f67d-447a-b898-d10dd445a39f {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1460.062954] env[69784]: DEBUG oslo_concurrency.lockutils [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Acquiring lock "refresh_cache-6cdbefb3-90e0-4ea5-b41f-a094723dbdbe" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1460.063122] env[69784]: DEBUG oslo_concurrency.lockutils [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Acquired lock "refresh_cache-6cdbefb3-90e0-4ea5-b41f-a094723dbdbe" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1460.063277] env[69784]: DEBUG nova.network.neutron [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1460.101460] env[69784]: DEBUG nova.network.neutron [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Instance cache missing network info. 
{{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1460.287687] env[69784]: DEBUG nova.network.neutron [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Updating instance_info_cache with network_info: [{"id": "a4a8ce74-f67d-447a-b898-d10dd445a39f", "address": "fa:16:3e:ce:52:12", "network": {"id": "e0a27684-7bce-47e5-aa25-b3e5b50d3019", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7b0f6ea1d8724018ae13e62fe7220317", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4a8ce74-f6", "ovs_interfaceid": "a4a8ce74-f67d-447a-b898-d10dd445a39f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1460.299870] env[69784]: DEBUG oslo_concurrency.lockutils [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Releasing lock "refresh_cache-6cdbefb3-90e0-4ea5-b41f-a094723dbdbe" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1460.300165] env[69784]: DEBUG nova.compute.manager [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Instance network_info: |[{"id": "a4a8ce74-f67d-447a-b898-d10dd445a39f", "address": "fa:16:3e:ce:52:12", "network": {"id": "e0a27684-7bce-47e5-aa25-b3e5b50d3019", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7b0f6ea1d8724018ae13e62fe7220317", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4a8ce74-f6", "ovs_interfaceid": "a4a8ce74-f67d-447a-b898-d10dd445a39f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1460.300540] env[69784]: DEBUG 
nova.virt.vmwareapi.vmops [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ce:52:12', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '685b4083-b748-41fb-a68a-273b1073fa28', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a4a8ce74-f67d-447a-b898-d10dd445a39f', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1460.307823] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Creating folder: Project (a22dfedb0c914a08bb54f5599a59aec9). Parent ref: group-v692547. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1460.308354] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c2926c01-9540-4e82-accb-e814d2255567 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.319050] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Created folder: Project (a22dfedb0c914a08bb54f5599a59aec9) in parent group-v692547. [ 1460.319236] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Creating folder: Instances. Parent ref: group-v692626. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1460.319445] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0911a276-0df9-4518-b27c-3c0e4260c0a9 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.328503] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Created folder: Instances in parent group-v692626. [ 1460.328718] env[69784]: DEBUG oslo.service.loopingcall [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1460.328887] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1460.329086] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ecba7e3f-cc52-403e-8d23-d032f7b5e0a6 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.347436] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1460.347436] env[69784]: value = "task-3467145" [ 1460.347436] env[69784]: _type = "Task" [ 1460.347436] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.354404] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467145, 'name': CreateVM_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.858126] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467145, 'name': CreateVM_Task, 'duration_secs': 0.282561} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.858126] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1460.858126] env[69784]: DEBUG oslo_concurrency.lockutils [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1460.858475] env[69784]: DEBUG oslo_concurrency.lockutils [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1460.858589] env[69784]: DEBUG oslo_concurrency.lockutils [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1460.858772] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d921bae-e653-4996-bf5e-de988445e231 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.862974] env[69784]: DEBUG oslo_vmware.api [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Waiting for the task: (returnval){ [ 1460.862974] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]528d7a11-5681-7ecd-18ed-3567f2154015" [ 1460.862974] env[69784]: _type = "Task" [ 1460.862974] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.870019] env[69784]: DEBUG oslo_vmware.api [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]528d7a11-5681-7ecd-18ed-3567f2154015, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.373103] env[69784]: DEBUG oslo_concurrency.lockutils [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1461.373463] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1461.373552] env[69784]: DEBUG oslo_concurrency.lockutils [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1461.840611] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1462.007881] env[69784]: DEBUG nova.compute.manager [req-08a1fc90-a5bb-42cf-99da-345e3d6f54b6 req-83e2556b-01c5-45cb-b3a3-89a59ab4b2bb service nova] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Received event network-changed-a4a8ce74-f67d-447a-b898-d10dd445a39f {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1462.008116] env[69784]: DEBUG nova.compute.manager [req-08a1fc90-a5bb-42cf-99da-345e3d6f54b6 req-83e2556b-01c5-45cb-b3a3-89a59ab4b2bb service nova] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Refreshing instance network info cache due to event network-changed-a4a8ce74-f67d-447a-b898-d10dd445a39f. 
{{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1462.008299] env[69784]: DEBUG oslo_concurrency.lockutils [req-08a1fc90-a5bb-42cf-99da-345e3d6f54b6 req-83e2556b-01c5-45cb-b3a3-89a59ab4b2bb service nova] Acquiring lock "refresh_cache-6cdbefb3-90e0-4ea5-b41f-a094723dbdbe" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1462.008442] env[69784]: DEBUG oslo_concurrency.lockutils [req-08a1fc90-a5bb-42cf-99da-345e3d6f54b6 req-83e2556b-01c5-45cb-b3a3-89a59ab4b2bb service nova] Acquired lock "refresh_cache-6cdbefb3-90e0-4ea5-b41f-a094723dbdbe" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1462.008598] env[69784]: DEBUG nova.network.neutron [req-08a1fc90-a5bb-42cf-99da-345e3d6f54b6 req-83e2556b-01c5-45cb-b3a3-89a59ab4b2bb service nova] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Refreshing network info cache for port a4a8ce74-f67d-447a-b898-d10dd445a39f {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1462.491009] env[69784]: DEBUG nova.network.neutron [req-08a1fc90-a5bb-42cf-99da-345e3d6f54b6 req-83e2556b-01c5-45cb-b3a3-89a59ab4b2bb service nova] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Updated VIF entry in instance network info cache for port a4a8ce74-f67d-447a-b898-d10dd445a39f. {{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1462.491388] env[69784]: DEBUG nova.network.neutron [req-08a1fc90-a5bb-42cf-99da-345e3d6f54b6 req-83e2556b-01c5-45cb-b3a3-89a59ab4b2bb service nova] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Updating instance_info_cache with network_info: [{"id": "a4a8ce74-f67d-447a-b898-d10dd445a39f", "address": "fa:16:3e:ce:52:12", "network": {"id": "e0a27684-7bce-47e5-aa25-b3e5b50d3019", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7b0f6ea1d8724018ae13e62fe7220317", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4a8ce74-f6", "ovs_interfaceid": "a4a8ce74-f67d-447a-b898-d10dd445a39f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1462.500635] env[69784]: DEBUG oslo_concurrency.lockutils [req-08a1fc90-a5bb-42cf-99da-345e3d6f54b6 req-83e2556b-01c5-45cb-b3a3-89a59ab4b2bb service nova] Releasing lock "refresh_cache-6cdbefb3-90e0-4ea5-b41f-a094723dbdbe" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1462.839596] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69784) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1462.853346] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1462.853582] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1462.853759] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1462.854027] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69784) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1462.855041] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-509748e6-ef77-47a6-b1f7-8132c010e35c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.864022] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d05916d6-d1eb-4969-881b-045acc2477eb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.878373] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c5270fc-3565-44cf-92d0-0dcce7ee310d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.884612] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88ccdbcd-a5a6-4448-beb5-9ab2e5fe0a59 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.913768] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180933MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=69784) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1462.913930] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1462.914155] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69784) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1462.994675] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 288af650-a19b-4ce5-baea-013dcaa6e908 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1462.994837] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 6109a6f5-11ea-4983-b271-f84aa859d6cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1462.994962] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 694e2a62-5f2e-475d-9356-a66651c3e5e2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1462.995099] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance e6d05e25-386e-43d1-aec4-d62b9476891d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1462.995220] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 26793ea1-2934-4b30-8f8c-6beefe7046f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1462.995335] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 91546cc5-6e8b-4175-b256-ba19e98c22cc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1462.995451] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 1473585c-f194-4396-b568-e8c1bc6d049b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1462.995569] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 697cd7aa-d710-4e46-b241-085961a8631d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1462.995683] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 161991fb-77d5-4a18-b0f3-d2346c8d3b68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1462.995796] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1463.008168] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 96ccaa91-fd19-4793-9583-afb4d5708cd1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1463.019893] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 3d3b940f-5376-4300-8b69-6cf40b0e2e31 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1463.029845] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance f04ff971-fae1-453a-a131-308618f24020 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1463.043078] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance a32a5943-a449-4b47-820c-5a7e593c6443 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1463.054372] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 93ea7e73-f280-4e22-9ac7-f1be9926a158 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1463.064050] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 45611c65-5ce7-4e31-add1-29d5b9d87e5a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1463.074985] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance f1af419b-6bc6-4ba5-9ec1-3011b3b055ef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1463.075310] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1463.075407] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1463.235502] env[69784]: DEBUG oslo_concurrency.lockutils [None req-6d04f637-d369-47c2-85c4-e387b47ef2ec tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Acquiring lock "6cdbefb3-90e0-4ea5-b41f-a094723dbdbe" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1463.284536] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9212ac9-6587-4196-8263-aea0718a70b1 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.292328] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15068725-8f68-445f-a44d-b7f2696eaa0c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.323460] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6dca131-d70a-48c9-9533-0035c7da6364 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.331662] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe8ef3df-410e-4f68-bf52-7fb6f6395c84 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.344879] env[69784]: DEBUG nova.compute.provider_tree [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1463.353137] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1463.368775] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69784) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1463.368998] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.455s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1464.363723] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1464.364202] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1464.364413] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Starting heal instance info cache {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1464.364627] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Rebuilding the list of instances to heal {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1464.387481] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1464.387664] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1464.387774] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Skipping network cache update for instance because it is Building. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1464.387897] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1464.388031] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1464.388158] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1464.388302] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1464.388468] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1464.388603] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1464.388721] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1464.388839] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Didn't find any instances for network info cache update. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1467.839671] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1468.836305] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1470.521947] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Acquiring lock "db704361-31ad-49a0-8aa7-01d4e3f42a3d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1470.522304] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Lock "db704361-31ad-49a0-8aa7-01d4e3f42a3d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1488.077818] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Acquiring lock "a34a0620-ea85-4bd5-9690-c93d70ecb9ec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1488.078129] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Lock "a34a0620-ea85-4bd5-9690-c93d70ecb9ec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1496.235415] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Acquiring lock "0d0e7d04-847c-486f-8cb1-b2b3afe33f2d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1496.235700] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Lock "0d0e7d04-847c-486f-8cb1-b2b3afe33f2d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1499.208337] 
env[69784]: DEBUG oslo_concurrency.lockutils [None req-2372b56a-d9df-4b7a-8f42-bda9cbfa2da0 tempest-ServersTestMultiNic-248893282 tempest-ServersTestMultiNic-248893282-project-member] Acquiring lock "6d7f1207-ba9a-4d1c-9499-6c0677fd38ea" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1499.208690] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2372b56a-d9df-4b7a-8f42-bda9cbfa2da0 tempest-ServersTestMultiNic-248893282 tempest-ServersTestMultiNic-248893282-project-member] Lock "6d7f1207-ba9a-4d1c-9499-6c0677fd38ea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1505.650055] env[69784]: WARNING oslo_vmware.rw_handles [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1505.650055] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1505.650055] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1505.650055] env[69784]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1505.650055] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1505.650055] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 1505.650055] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1505.650055] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1505.650055] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1505.650055] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1505.650055] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1505.650055] env[69784]: ERROR oslo_vmware.rw_handles [ 1505.650055] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/7e879a57-1abb-4d47-a041-30fcc176b89a/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1505.652009] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1505.652273] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Copying 
Virtual Disk [datastore1] vmware_temp/7e879a57-1abb-4d47-a041-30fcc176b89a/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] vmware_temp/7e879a57-1abb-4d47-a041-30fcc176b89a/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1505.652575] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f838da30-0705-4697-9ffd-fe2cbfc7f8f4 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.660495] env[69784]: DEBUG oslo_vmware.api [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Waiting for the task: (returnval){ [ 1505.660495] env[69784]: value = "task-3467146" [ 1505.660495] env[69784]: _type = "Task" [ 1505.660495] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.668609] env[69784]: DEBUG oslo_vmware.api [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Task: {'id': task-3467146, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.170730] env[69784]: DEBUG oslo_vmware.exceptions [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Fault InvalidArgument not matched. {{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1506.171108] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1506.171686] env[69784]: ERROR nova.compute.manager [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1506.171686] env[69784]: Faults: ['InvalidArgument'] [ 1506.171686] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Traceback (most recent call last): [ 1506.171686] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1506.171686] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] yield resources [ 1506.171686] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1506.171686] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] self.driver.spawn(context, instance, image_meta, [ 1506.171686] env[69784]: ERROR nova.compute.manager [instance: 
288af650-a19b-4ce5-baea-013dcaa6e908] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1506.171686] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1506.171686] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1506.171686] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] self._fetch_image_if_missing(context, vi) [ 1506.171686] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1506.171997] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] image_cache(vi, tmp_image_ds_loc) [ 1506.171997] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1506.171997] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] vm_util.copy_virtual_disk( [ 1506.171997] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1506.171997] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] session._wait_for_task(vmdk_copy_task) [ 1506.171997] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1506.171997] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] return self.wait_for_task(task_ref) [ 1506.171997] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1506.171997] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] return evt.wait() [ 1506.171997] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1506.171997] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] result = hub.switch() [ 1506.171997] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1506.171997] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] return self.greenlet.switch() [ 1506.172591] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1506.172591] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] self.f(*self.args, **self.kw) [ 1506.172591] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1506.172591] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] raise 
exceptions.translate_fault(task_info.error) [ 1506.172591] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1506.172591] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Faults: ['InvalidArgument'] [ 1506.172591] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] [ 1506.172591] env[69784]: INFO nova.compute.manager [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Terminating instance [ 1506.174179] env[69784]: DEBUG nova.compute.manager [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1506.174387] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1506.174687] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1506.174879] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1506.175593] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eacfd270-2d54-43e6-8c58-6fda25326040 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.178091] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7d6f2539-c8c9-47f2-ab3c-2a57a4037d07 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.183673] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1506.183898] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2506f812-5fcf-4a1a-9368-b09d567840e9 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.186443] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 
tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1506.186630] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1506.187288] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9854f35a-914e-4673-99df-8f03a55c4250 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.191995] env[69784]: DEBUG oslo_vmware.api [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Waiting for the task: (returnval){ [ 1506.191995] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]5215e3c7-15dc-2e57-869e-a34ee889044e" [ 1506.191995] env[69784]: _type = "Task" [ 1506.191995] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.206232] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1506.206484] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Creating directory with path [datastore1] vmware_temp/03bad93b-b560-4a14-8800-54bcb286cd87/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1506.206690] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cebe0687-d143-4f3c-91a1-933b757feb90 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.225462] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Created directory with path [datastore1] vmware_temp/03bad93b-b560-4a14-8800-54bcb286cd87/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1506.225656] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Fetch image to [datastore1] vmware_temp/03bad93b-b560-4a14-8800-54bcb286cd87/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1506.225861] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 
6109a6f5-11ea-4983-b271-f84aa859d6cd] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/03bad93b-b560-4a14-8800-54bcb286cd87/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1506.226807] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4de5a81-bbbf-4a68-b751-c02077c26973 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.233328] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bfda166-3137-425a-91c2-37ea011fedbe {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.241951] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d1c2936-9f2a-4084-86c4-a06a6efbf31d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.273946] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c610431b-68d6-4ef4-a67f-0ad2b64c0c87 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.276362] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1506.276489] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1506.276676] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Deleting the datastore file [datastore1] 288af650-a19b-4ce5-baea-013dcaa6e908 {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1506.276918] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7123620d-3b41-45b7-a073-420e39c6f951 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.281439] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5da24325-bdee-4b88-9e8b-a0e086955191 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.284115] env[69784]: DEBUG oslo_vmware.api [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Waiting for the task: (returnval){ [ 1506.284115] env[69784]: value = "task-3467148" [ 1506.284115] env[69784]: _type = "Task" [ 1506.284115] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.291376] env[69784]: DEBUG oslo_vmware.api [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Task: {'id': task-3467148, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.306754] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1506.357533] env[69784]: DEBUG oslo_vmware.rw_handles [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/03bad93b-b560-4a14-8800-54bcb286cd87/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1506.419588] env[69784]: DEBUG oslo_vmware.rw_handles [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Completed reading data from the image iterator. {{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1506.419859] env[69784]: DEBUG oslo_vmware.rw_handles [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/03bad93b-b560-4a14-8800-54bcb286cd87/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1506.794702] env[69784]: DEBUG oslo_vmware.api [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Task: {'id': task-3467148, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067933} completed successfully. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.794977] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1506.795141] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1506.795312] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1506.795486] env[69784]: INFO nova.compute.manager [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1506.797662] env[69784]: DEBUG nova.compute.claims [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1506.797834] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1506.798059] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1507.032085] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2be0f66f-1345-4552-a836-efbb9b37724e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.039506] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18c8cdf1-ef20-4de4-9889-14455e21e87c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.068302] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86052021-36cc-41db-b66a-00a1dc1fbfde {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.075158] env[69784]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81f5d47f-24d6-4fa6-9d88-73861b0213c5 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.088755] env[69784]: DEBUG nova.compute.provider_tree [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1507.096861] env[69784]: DEBUG nova.scheduler.client.report [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1507.110268] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.312s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1507.110789] env[69784]: ERROR nova.compute.manager [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1507.110789] env[69784]: Faults: ['InvalidArgument'] [ 1507.110789] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Traceback (most recent call last): [ 1507.110789] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1507.110789] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] self.driver.spawn(context, instance, image_meta, [ 1507.110789] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1507.110789] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1507.110789] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1507.110789] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] self._fetch_image_if_missing(context, vi) [ 1507.110789] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in 
_fetch_image_if_missing [ 1507.110789] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] image_cache(vi, tmp_image_ds_loc) [ 1507.110789] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1507.111165] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] vm_util.copy_virtual_disk( [ 1507.111165] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1507.111165] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] session._wait_for_task(vmdk_copy_task) [ 1507.111165] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1507.111165] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] return self.wait_for_task(task_ref) [ 1507.111165] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1507.111165] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] return evt.wait() [ 1507.111165] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1507.111165] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] result = hub.switch() [ 1507.111165] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1507.111165] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] return self.greenlet.switch() [ 1507.111165] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1507.111165] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] self.f(*self.args, **self.kw) [ 1507.111499] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1507.111499] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] raise exceptions.translate_fault(task_info.error) [ 1507.111499] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1507.111499] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Faults: ['InvalidArgument'] [ 1507.111499] env[69784]: ERROR nova.compute.manager [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] [ 1507.111499] env[69784]: DEBUG nova.compute.utils [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] VimFaultException {{(pid=69784) 
notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1507.113234] env[69784]: DEBUG nova.compute.manager [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Build of instance 288af650-a19b-4ce5-baea-013dcaa6e908 was re-scheduled: A specified parameter was not correct: fileType [ 1507.113234] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1507.113607] env[69784]: DEBUG nova.compute.manager [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1507.113781] env[69784]: DEBUG nova.compute.manager [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1507.113950] env[69784]: DEBUG nova.compute.manager [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1507.114127] env[69784]: DEBUG nova.network.neutron [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1507.417184] env[69784]: DEBUG nova.network.neutron [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1507.429943] env[69784]: INFO nova.compute.manager [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Took 0.32 seconds to deallocate network for instance. 
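The records above show a spawn failing inside VirtualDiskManager.CopyVirtualDisk_Task with Faults: ['InvalidArgument'] ("A specified parameter was not correct: fileType"); the compute manager then aborts the resource claim, deallocates the (empty) network info, and hands the build back to the scheduler. The sketch below only illustrates that task-wait/fault-translation pattern with oslo.vmware; the helper names (copy_disk, build_with_rescheduling, reschedule) and the exact invoke_api arguments are assumptions for illustration, not the Nova code paths named in the traceback.

# Illustrative sketch, assuming an already-established oslo.vmware
# VMwareAPISession (like the one created near the top of this log).
# Not the Nova implementation referenced in the traceback above.
from oslo_vmware import exceptions as vexc


def copy_disk(session, source_path, dest_path, dc_ref):
    """Start a CopyVirtualDisk_Task on the datastore and wait for it."""
    vim = session.vim
    task = session.invoke_api(
        vim, 'CopyVirtualDisk_Task',
        vim.service_content.virtualDiskManager,
        sourceName=source_path, sourceDatacenter=dc_ref,
        destName=dest_path, destDatacenter=dc_ref)
    # wait_for_task() polls the task; if it fails, the translated fault is
    # raised, e.g. VimFaultException with fault_list=['InvalidArgument'].
    return session.wait_for_task(task)


def build_with_rescheduling(session, source_path, dest_path, dc_ref,
                            reschedule):
    """Hypothetical caller mirroring the abort/re-schedule flow logged above."""
    try:
        copy_disk(session, source_path, dest_path, dc_ref)
    except vexc.VimFaultException as err:
        if 'InvalidArgument' in getattr(err, 'fault_list', []):
            # Same outcome as in the log: drop the claim and send the
            # request back to the scheduler instead of failing it outright.
            reschedule(reason=str(err))
        else:
            raise

The VimFaultException caught here is the same exception type shown bubbling up through wait_for_task in the traceback above; in the log the equivalent handling is what produces the "was re-scheduled" and "Deleted allocations" records that follow.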
[ 1507.535617] env[69784]: INFO nova.scheduler.client.report [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Deleted allocations for instance 288af650-a19b-4ce5-baea-013dcaa6e908 [ 1507.558114] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d29c010f-0293-4220-842b-7f0cc03ecb25 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Lock "288af650-a19b-4ce5-baea-013dcaa6e908" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 615.769s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1507.558803] env[69784]: DEBUG oslo_concurrency.lockutils [None req-17026cf7-0b00-4636-8056-1432b0612965 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Lock "288af650-a19b-4ce5-baea-013dcaa6e908" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 419.781s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1507.559049] env[69784]: DEBUG oslo_concurrency.lockutils [None req-17026cf7-0b00-4636-8056-1432b0612965 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Acquiring lock "288af650-a19b-4ce5-baea-013dcaa6e908-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1507.559264] env[69784]: DEBUG oslo_concurrency.lockutils [None req-17026cf7-0b00-4636-8056-1432b0612965 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Lock "288af650-a19b-4ce5-baea-013dcaa6e908-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1507.559434] env[69784]: DEBUG oslo_concurrency.lockutils [None req-17026cf7-0b00-4636-8056-1432b0612965 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Lock "288af650-a19b-4ce5-baea-013dcaa6e908-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1507.561664] env[69784]: INFO nova.compute.manager [None req-17026cf7-0b00-4636-8056-1432b0612965 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Terminating instance [ 1507.563755] env[69784]: DEBUG nova.compute.manager [None req-17026cf7-0b00-4636-8056-1432b0612965 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Start destroying the instance on the hypervisor. 
{{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1507.563977] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-17026cf7-0b00-4636-8056-1432b0612965 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1507.565105] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4a613051-e1f7-41f6-87e4-46d69186745a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.568967] env[69784]: DEBUG nova.compute.manager [None req-d9758d6e-bb0e-4e07-91bd-04c5c71d2340 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 96ccaa91-fd19-4793-9583-afb4d5708cd1] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1507.575131] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-305d0a46-b5f1-4e31-90ab-c12ea5b53d66 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.604750] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-17026cf7-0b00-4636-8056-1432b0612965 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 288af650-a19b-4ce5-baea-013dcaa6e908 could not be found. [ 1507.604750] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-17026cf7-0b00-4636-8056-1432b0612965 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1507.604750] env[69784]: INFO nova.compute.manager [None req-17026cf7-0b00-4636-8056-1432b0612965 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1507.604905] env[69784]: DEBUG oslo.service.loopingcall [None req-17026cf7-0b00-4636-8056-1432b0612965 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1507.605229] env[69784]: DEBUG nova.compute.manager [None req-d9758d6e-bb0e-4e07-91bd-04c5c71d2340 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 96ccaa91-fd19-4793-9583-afb4d5708cd1] Instance disappeared before build. 
{{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1507.606560] env[69784]: DEBUG nova.compute.manager [-] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1507.606560] env[69784]: DEBUG nova.network.neutron [-] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1507.633081] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d9758d6e-bb0e-4e07-91bd-04c5c71d2340 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Lock "96ccaa91-fd19-4793-9583-afb4d5708cd1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 241.209s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1507.634443] env[69784]: DEBUG nova.network.neutron [-] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1507.641737] env[69784]: INFO nova.compute.manager [-] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] Took 0.04 seconds to deallocate network for instance. [ 1507.643765] env[69784]: DEBUG nova.compute.manager [None req-e24dea2d-74c9-4f93-a134-b459b6b8bcb3 tempest-ImagesTestJSON-82243885 tempest-ImagesTestJSON-82243885-project-member] [instance: 3d3b940f-5376-4300-8b69-6cf40b0e2e31] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1507.666048] env[69784]: DEBUG nova.compute.manager [None req-e24dea2d-74c9-4f93-a134-b459b6b8bcb3 tempest-ImagesTestJSON-82243885 tempest-ImagesTestJSON-82243885-project-member] [instance: 3d3b940f-5376-4300-8b69-6cf40b0e2e31] Instance disappeared before build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1507.687986] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e24dea2d-74c9-4f93-a134-b459b6b8bcb3 tempest-ImagesTestJSON-82243885 tempest-ImagesTestJSON-82243885-project-member] Lock "3d3b940f-5376-4300-8b69-6cf40b0e2e31" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 213.216s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1507.700319] env[69784]: DEBUG nova.compute.manager [None req-91368744-5eb0-4443-a864-f40c9603df9f tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: f04ff971-fae1-453a-a131-308618f24020] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1507.725507] env[69784]: DEBUG nova.compute.manager [None req-91368744-5eb0-4443-a864-f40c9603df9f tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: f04ff971-fae1-453a-a131-308618f24020] Instance disappeared before build. 
{{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1507.741774] env[69784]: DEBUG oslo_concurrency.lockutils [None req-17026cf7-0b00-4636-8056-1432b0612965 tempest-VolumesAdminNegativeTest-1960791252 tempest-VolumesAdminNegativeTest-1960791252-project-member] Lock "288af650-a19b-4ce5-baea-013dcaa6e908" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.183s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1507.743184] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "288af650-a19b-4ce5-baea-013dcaa6e908" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 234.850s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1507.743184] env[69784]: INFO nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 288af650-a19b-4ce5-baea-013dcaa6e908] During sync_power_state the instance has a pending task (deleting). Skip. [ 1507.743184] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "288af650-a19b-4ce5-baea-013dcaa6e908" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1507.749933] env[69784]: DEBUG oslo_concurrency.lockutils [None req-91368744-5eb0-4443-a864-f40c9603df9f tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Lock "f04ff971-fae1-453a-a131-308618f24020" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 208.438s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1507.758999] env[69784]: DEBUG nova.compute.manager [None req-bb7d4401-b86d-4454-b065-4c6e889e0127 tempest-ServerRescueTestJSON-660458893 tempest-ServerRescueTestJSON-660458893-project-member] [instance: a32a5943-a449-4b47-820c-5a7e593c6443] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1507.784342] env[69784]: DEBUG nova.compute.manager [None req-bb7d4401-b86d-4454-b065-4c6e889e0127 tempest-ServerRescueTestJSON-660458893 tempest-ServerRescueTestJSON-660458893-project-member] [instance: a32a5943-a449-4b47-820c-5a7e593c6443] Instance disappeared before build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1507.808133] env[69784]: DEBUG oslo_concurrency.lockutils [None req-bb7d4401-b86d-4454-b065-4c6e889e0127 tempest-ServerRescueTestJSON-660458893 tempest-ServerRescueTestJSON-660458893-project-member] Lock "a32a5943-a449-4b47-820c-5a7e593c6443" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 206.403s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1507.819061] env[69784]: DEBUG nova.compute.manager [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Starting instance... 
{{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1507.866242] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1507.866523] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1507.868051] env[69784]: INFO nova.compute.claims [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1508.086555] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7194c2c-6709-4ff3-913a-63ef56d25e70 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.094332] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55331523-7baf-422d-bc06-97d44fc0b195 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.123118] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c79fe324-6b34-49c4-93d7-a819541dd1af {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.129634] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1942ace-1f34-4752-8714-9b105811f2fb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.142787] env[69784]: DEBUG nova.compute.provider_tree [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1508.151052] env[69784]: DEBUG nova.scheduler.client.report [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1508.166656] env[69784]: DEBUG oslo_concurrency.lockutils [None 
req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.300s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1508.167187] env[69784]: DEBUG nova.compute.manager [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Start building networks asynchronously for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1508.199459] env[69784]: DEBUG nova.compute.utils [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1508.200962] env[69784]: DEBUG nova.compute.manager [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1508.201150] env[69784]: DEBUG nova.network.neutron [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1508.210051] env[69784]: DEBUG nova.compute.manager [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1508.256942] env[69784]: DEBUG nova.policy [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd27e5d7453504151b21a81a6123eb13f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ff1231ca52524d8c8422253f0e3e05dd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 1508.274088] env[69784]: DEBUG nova.compute.manager [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Start spawning the instance on the hypervisor. 
{{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1508.297351] env[69784]: DEBUG nova.virt.hardware [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1508.297634] env[69784]: DEBUG nova.virt.hardware [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1508.297882] env[69784]: DEBUG nova.virt.hardware [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1508.297975] env[69784]: DEBUG nova.virt.hardware [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1508.298134] env[69784]: DEBUG nova.virt.hardware [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1508.298283] env[69784]: DEBUG nova.virt.hardware [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1508.298505] env[69784]: DEBUG nova.virt.hardware [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1508.298688] env[69784]: DEBUG nova.virt.hardware [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1508.298856] env[69784]: DEBUG 
nova.virt.hardware [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1508.299028] env[69784]: DEBUG nova.virt.hardware [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1508.299209] env[69784]: DEBUG nova.virt.hardware [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1508.300058] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2895bb75-32d8-4340-8aa5-fe4a7602deb7 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.308172] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57414c5d-c41b-47d6-9563-506638fad79c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.544294] env[69784]: DEBUG nova.network.neutron [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Successfully created port: d9d6ba0c-88dc-45a8-9da2-1babafc861a9 {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1509.265264] env[69784]: DEBUG nova.network.neutron [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Successfully updated port: d9d6ba0c-88dc-45a8-9da2-1babafc861a9 {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1509.275627] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Acquiring lock "refresh_cache-93ea7e73-f280-4e22-9ac7-f1be9926a158" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1509.275786] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Acquired lock "refresh_cache-93ea7e73-f280-4e22-9ac7-f1be9926a158" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1509.275933] env[69784]: DEBUG nova.network.neutron [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1509.317361] env[69784]: DEBUG nova.network.neutron [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f 
tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Instance cache missing network info. {{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1509.461901] env[69784]: DEBUG nova.compute.manager [req-ce304e06-c29c-49ae-9700-4bc830391f85 req-c54b34e7-ee95-4d02-8acb-12f95c225035 service nova] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Received event network-vif-plugged-d9d6ba0c-88dc-45a8-9da2-1babafc861a9 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1509.462139] env[69784]: DEBUG oslo_concurrency.lockutils [req-ce304e06-c29c-49ae-9700-4bc830391f85 req-c54b34e7-ee95-4d02-8acb-12f95c225035 service nova] Acquiring lock "93ea7e73-f280-4e22-9ac7-f1be9926a158-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1509.462356] env[69784]: DEBUG oslo_concurrency.lockutils [req-ce304e06-c29c-49ae-9700-4bc830391f85 req-c54b34e7-ee95-4d02-8acb-12f95c225035 service nova] Lock "93ea7e73-f280-4e22-9ac7-f1be9926a158-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1509.462524] env[69784]: DEBUG oslo_concurrency.lockutils [req-ce304e06-c29c-49ae-9700-4bc830391f85 req-c54b34e7-ee95-4d02-8acb-12f95c225035 service nova] Lock "93ea7e73-f280-4e22-9ac7-f1be9926a158-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1509.462778] env[69784]: DEBUG nova.compute.manager [req-ce304e06-c29c-49ae-9700-4bc830391f85 req-c54b34e7-ee95-4d02-8acb-12f95c225035 service nova] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] No waiting events found dispatching network-vif-plugged-d9d6ba0c-88dc-45a8-9da2-1babafc861a9 {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1509.462869] env[69784]: WARNING nova.compute.manager [req-ce304e06-c29c-49ae-9700-4bc830391f85 req-c54b34e7-ee95-4d02-8acb-12f95c225035 service nova] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Received unexpected event network-vif-plugged-d9d6ba0c-88dc-45a8-9da2-1babafc861a9 for instance with vm_state building and task_state spawning. [ 1509.463028] env[69784]: DEBUG nova.compute.manager [req-ce304e06-c29c-49ae-9700-4bc830391f85 req-c54b34e7-ee95-4d02-8acb-12f95c225035 service nova] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Received event network-changed-d9d6ba0c-88dc-45a8-9da2-1babafc861a9 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1509.463186] env[69784]: DEBUG nova.compute.manager [req-ce304e06-c29c-49ae-9700-4bc830391f85 req-c54b34e7-ee95-4d02-8acb-12f95c225035 service nova] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Refreshing instance network info cache due to event network-changed-d9d6ba0c-88dc-45a8-9da2-1babafc861a9. 
{{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1509.463362] env[69784]: DEBUG oslo_concurrency.lockutils [req-ce304e06-c29c-49ae-9700-4bc830391f85 req-c54b34e7-ee95-4d02-8acb-12f95c225035 service nova] Acquiring lock "refresh_cache-93ea7e73-f280-4e22-9ac7-f1be9926a158" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1509.488603] env[69784]: DEBUG nova.network.neutron [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Updating instance_info_cache with network_info: [{"id": "d9d6ba0c-88dc-45a8-9da2-1babafc861a9", "address": "fa:16:3e:25:0d:99", "network": {"id": "77eea3d0-f786-4985-bb9e-fc3bf4082a20", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1954480638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff1231ca52524d8c8422253f0e3e05dd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9d6ba0c-88", "ovs_interfaceid": "d9d6ba0c-88dc-45a8-9da2-1babafc861a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1509.502633] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Releasing lock "refresh_cache-93ea7e73-f280-4e22-9ac7-f1be9926a158" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1509.502918] env[69784]: DEBUG nova.compute.manager [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Instance network_info: |[{"id": "d9d6ba0c-88dc-45a8-9da2-1babafc861a9", "address": "fa:16:3e:25:0d:99", "network": {"id": "77eea3d0-f786-4985-bb9e-fc3bf4082a20", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1954480638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff1231ca52524d8c8422253f0e3e05dd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9d6ba0c-88", "ovs_interfaceid": "d9d6ba0c-88dc-45a8-9da2-1babafc861a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1509.503215] env[69784]: DEBUG oslo_concurrency.lockutils [req-ce304e06-c29c-49ae-9700-4bc830391f85 req-c54b34e7-ee95-4d02-8acb-12f95c225035 service nova] Acquired lock "refresh_cache-93ea7e73-f280-4e22-9ac7-f1be9926a158" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1509.503393] env[69784]: DEBUG nova.network.neutron [req-ce304e06-c29c-49ae-9700-4bc830391f85 req-c54b34e7-ee95-4d02-8acb-12f95c225035 service nova] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Refreshing network info cache for port d9d6ba0c-88dc-45a8-9da2-1babafc861a9 {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1509.504405] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:25:0d:99', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8652fa2-e608-4fe6-8b35-402a40906b40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd9d6ba0c-88dc-45a8-9da2-1babafc861a9', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1509.515269] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Creating folder: Project (ff1231ca52524d8c8422253f0e3e05dd). Parent ref: group-v692547. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1509.515909] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b85d30ad-e2d2-43c3-8945-5a0da2099af5 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.533128] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Created folder: Project (ff1231ca52524d8c8422253f0e3e05dd) in parent group-v692547. [ 1509.533326] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Creating folder: Instances. Parent ref: group-v692629. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1509.533557] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3f7f1953-6c4f-4767-a801-4a95a23eac78 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.542544] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Created folder: Instances in parent group-v692629. 
[ 1509.542736] env[69784]: DEBUG oslo.service.loopingcall [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1509.542928] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1509.543143] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dc73216d-6924-4466-8790-8cbe2d08178c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.563738] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1509.563738] env[69784]: value = "task-3467151" [ 1509.563738] env[69784]: _type = "Task" [ 1509.563738] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.571053] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467151, 'name': CreateVM_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.780778] env[69784]: DEBUG nova.network.neutron [req-ce304e06-c29c-49ae-9700-4bc830391f85 req-c54b34e7-ee95-4d02-8acb-12f95c225035 service nova] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Updated VIF entry in instance network info cache for port d9d6ba0c-88dc-45a8-9da2-1babafc861a9. {{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1509.781162] env[69784]: DEBUG nova.network.neutron [req-ce304e06-c29c-49ae-9700-4bc830391f85 req-c54b34e7-ee95-4d02-8acb-12f95c225035 service nova] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Updating instance_info_cache with network_info: [{"id": "d9d6ba0c-88dc-45a8-9da2-1babafc861a9", "address": "fa:16:3e:25:0d:99", "network": {"id": "77eea3d0-f786-4985-bb9e-fc3bf4082a20", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1954480638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff1231ca52524d8c8422253f0e3e05dd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9d6ba0c-88", "ovs_interfaceid": "d9d6ba0c-88dc-45a8-9da2-1babafc861a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1509.791206] env[69784]: DEBUG oslo_concurrency.lockutils [req-ce304e06-c29c-49ae-9700-4bc830391f85 req-c54b34e7-ee95-4d02-8acb-12f95c225035 service nova] Releasing lock "refresh_cache-93ea7e73-f280-4e22-9ac7-f1be9926a158" {{(pid=69784) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1510.073281] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467151, 'name': CreateVM_Task, 'duration_secs': 0.284906} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.073506] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1510.074194] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1510.074439] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1510.074824] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1510.075082] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2b85065-2ce7-419b-8883-cb5de5bb5b6c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.079567] env[69784]: DEBUG oslo_vmware.api [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Waiting for the task: (returnval){ [ 1510.079567] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]5269cc31-9001-005f-7bd1-95ecc32fbc47" [ 1510.079567] env[69784]: _type = "Task" [ 1510.079567] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.086829] env[69784]: DEBUG oslo_vmware.api [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]5269cc31-9001-005f-7bd1-95ecc32fbc47, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.590481] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1510.590788] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1510.590949] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1516.839621] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1516.839983] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1518.847827] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1518.848121] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69784) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1519.840608] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1520.036295] env[69784]: DEBUG oslo_concurrency.lockutils [None req-db4f857f-bea7-4ead-a510-fb6a35428380 tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Acquiring lock "93ea7e73-f280-4e22-9ac7-f1be9926a158" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1520.839810] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1521.840337] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1521.840679] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Cleaning up deleted instances with incomplete migration {{(pid=69784) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11236}} [ 1522.849726] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1523.840610] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1523.852017] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1523.852358] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1523.852450] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1523.852579] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Auditing locally available compute resources for cpu-1 (node: 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69784) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1523.853694] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dc046ee-40db-48b6-b120-16b87530fac0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.862666] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06782de2-e0a0-4ded-8c00-dd36f922894b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.876452] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49ffd9d2-25be-4ad9-aeff-f23944b31f94 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.882714] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da149cc-8c77-4858-ba85-9855c39d5040 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.912172] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180933MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=69784) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1523.912335] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1523.912539] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1524.061053] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 6109a6f5-11ea-4983-b271-f84aa859d6cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1524.061157] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 694e2a62-5f2e-475d-9356-a66651c3e5e2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1524.061248] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance e6d05e25-386e-43d1-aec4-d62b9476891d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1524.061366] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 26793ea1-2934-4b30-8f8c-6beefe7046f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1524.061499] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 91546cc5-6e8b-4175-b256-ba19e98c22cc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1524.061616] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 1473585c-f194-4396-b568-e8c1bc6d049b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1524.061735] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 697cd7aa-d710-4e46-b241-085961a8631d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1524.061850] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 161991fb-77d5-4a18-b0f3-d2346c8d3b68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1524.061966] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1524.062092] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 93ea7e73-f280-4e22-9ac7-f1be9926a158 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1524.073559] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 45611c65-5ce7-4e31-add1-29d5b9d87e5a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1524.083965] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance f1af419b-6bc6-4ba5-9ec1-3011b3b055ef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1524.093907] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance db704361-31ad-49a0-8aa7-01d4e3f42a3d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1524.104594] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance a34a0620-ea85-4bd5-9690-c93d70ecb9ec has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1524.115274] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1524.124584] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 6d7f1207-ba9a-4d1c-9499-6c0677fd38ea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1524.124910] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1524.125086] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1524.142660] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Refreshing inventories for resource provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1524.158240] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Updating ProviderTree inventory for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1524.158434] env[69784]: DEBUG nova.compute.provider_tree [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Updating inventory in ProviderTree for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1524.173144] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Refreshing aggregate associations for resource provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3, aggregates: None {{(pid=69784) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1524.193096] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Refreshing trait associations for resource provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69784) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1524.382760] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-facfcfd3-7c49-43c9-9f53-00d910c764ac {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.391112] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ef796e6e-2b81-4f73-b22e-ebbeae996ca3 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.422418] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61d19888-d1f9-4ffb-80af-df99e893a2b5 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.429808] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0894cba-1663-4b76-b94c-50c842edbdc3 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.443482] env[69784]: DEBUG nova.compute.provider_tree [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1524.451928] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1524.467017] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69784) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1524.467230] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.555s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1525.979408] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Acquiring lock "ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1525.979697] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Lock "ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1526.462076] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69784) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1526.462267] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1526.462413] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Starting heal instance info cache {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1526.462534] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Rebuilding the list of instances to heal {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1526.485437] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1526.485614] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1526.485743] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1526.485873] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1526.486008] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1526.486141] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1526.486264] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1526.486383] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Skipping network cache update for instance because it is Building. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1526.486680] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1526.486680] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1526.486777] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Didn't find any instances for network info cache update. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1528.839365] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1528.839641] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1528.839765] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Cleaning up deleted instances {{(pid=69784) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11198}} [ 1528.849204] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] There are 0 instances to clean {{(pid=69784) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11207}} [ 1539.206388] env[69784]: DEBUG oslo_concurrency.lockutils [None req-538e4518-37dd-4dbb-a33c-a0d50d9c0918 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Acquiring lock "75542f72-2ff8-44c7-90f2-b33c3391148e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1539.206660] env[69784]: DEBUG oslo_concurrency.lockutils [None req-538e4518-37dd-4dbb-a33c-a0d50d9c0918 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Lock "75542f72-2ff8-44c7-90f2-b33c3391148e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1553.559124] env[69784]: WARNING oslo_vmware.rw_handles [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1553.559124] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1553.559124] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1553.559124] env[69784]: ERROR 
oslo_vmware.rw_handles self._conn.getresponse() [ 1553.559124] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1553.559124] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 1553.559124] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1553.559124] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1553.559124] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1553.559124] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1553.559124] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1553.559124] env[69784]: ERROR oslo_vmware.rw_handles [ 1553.559767] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/03bad93b-b560-4a14-8800-54bcb286cd87/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1553.561528] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1553.561765] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Copying Virtual Disk [datastore1] vmware_temp/03bad93b-b560-4a14-8800-54bcb286cd87/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] vmware_temp/03bad93b-b560-4a14-8800-54bcb286cd87/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1553.562063] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-80a90823-6318-4914-b1ba-228cea3f2e24 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.572296] env[69784]: DEBUG oslo_vmware.api [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Waiting for the task: (returnval){ [ 1553.572296] env[69784]: value = "task-3467152" [ 1553.572296] env[69784]: _type = "Task" [ 1553.572296] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.581148] env[69784]: DEBUG oslo_vmware.api [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Task: {'id': task-3467152, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.082431] env[69784]: DEBUG oslo_vmware.exceptions [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Fault InvalidArgument not matched. {{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1554.082659] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1554.083252] env[69784]: ERROR nova.compute.manager [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1554.083252] env[69784]: Faults: ['InvalidArgument'] [ 1554.083252] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Traceback (most recent call last): [ 1554.083252] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1554.083252] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] yield resources [ 1554.083252] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1554.083252] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] self.driver.spawn(context, instance, image_meta, [ 1554.083252] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1554.083252] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1554.083252] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1554.083252] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] self._fetch_image_if_missing(context, vi) [ 1554.083252] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1554.083651] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] image_cache(vi, tmp_image_ds_loc) [ 1554.083651] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1554.083651] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] vm_util.copy_virtual_disk( [ 1554.083651] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1554.083651] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] session._wait_for_task(vmdk_copy_task) [ 1554.083651] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1554.083651] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] return self.wait_for_task(task_ref) [ 1554.083651] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1554.083651] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] return evt.wait() [ 1554.083651] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1554.083651] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] result = hub.switch() [ 1554.083651] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1554.083651] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] return self.greenlet.switch() [ 1554.084174] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1554.084174] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] self.f(*self.args, **self.kw) [ 1554.084174] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1554.084174] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] raise exceptions.translate_fault(task_info.error) [ 1554.084174] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1554.084174] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Faults: ['InvalidArgument'] [ 1554.084174] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] [ 1554.084174] env[69784]: INFO nova.compute.manager [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Terminating instance [ 1554.085125] env[69784]: DEBUG oslo_concurrency.lockutils [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1554.085335] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 
tempest-DeleteServersTestJSON-422882759-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1554.085574] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bd3d203f-231e-44d7-9d71-491375bb0f8d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.087922] env[69784]: DEBUG nova.compute.manager [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1554.088122] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1554.088840] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73dd2fa4-c273-4650-bf33-c779bc8facfd {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.095494] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1554.095734] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-79cf9eec-c164-473b-b588-7e98dc726689 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.097749] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1554.097923] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1554.098828] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25bee3ec-d608-4ba9-aa33-bf3e01276254 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.105163] env[69784]: DEBUG oslo_vmware.api [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Waiting for the task: (returnval){ [ 1554.105163] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52caf755-d704-5af8-2119-86f3dcfd5e69" [ 1554.105163] env[69784]: _type = "Task" [ 1554.105163] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.112532] env[69784]: DEBUG oslo_vmware.api [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52caf755-d704-5af8-2119-86f3dcfd5e69, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.611971] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1554.612519] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1554.612519] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Deleting the datastore file [datastore1] 6109a6f5-11ea-4983-b271-f84aa859d6cd {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1554.615730] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-07513c16-afd6-494b-a95f-95439239b1e0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.617647] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1554.617888] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Creating directory with path [datastore1] vmware_temp/6e2a54eb-7217-4ab0-bbf4-b28f7cd40f8f/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1554.618461] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fa0c5258-7a6b-4fd4-90a0-32b1c32a88d9 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.621933] env[69784]: DEBUG oslo_vmware.api [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Waiting for the task: (returnval){ [ 1554.621933] env[69784]: value = "task-3467154" [ 1554.621933] env[69784]: _type = "Task" [ 1554.621933] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.629059] env[69784]: DEBUG oslo_vmware.api [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Task: {'id': task-3467154, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.639355] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Created directory with path [datastore1] vmware_temp/6e2a54eb-7217-4ab0-bbf4-b28f7cd40f8f/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1554.639556] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Fetch image to [datastore1] vmware_temp/6e2a54eb-7217-4ab0-bbf4-b28f7cd40f8f/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1554.639725] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/6e2a54eb-7217-4ab0-bbf4-b28f7cd40f8f/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1554.640476] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39cd5ff2-abdf-42a5-958e-77d161603ef1 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.647365] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bfe5da3-ba1c-411d-ae0a-a508649cb029 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.656050] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42ef9341-b241-4fbe-b083-beae80e88a0e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.685681] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b63a8a-6cc1-4c71-b38b-27020c49d3b9 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.691361] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-013e7dd7-bab3-4028-bcdd-ee3737aeddd0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.714998] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Downloading image file data 
a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1554.765372] env[69784]: DEBUG oslo_vmware.rw_handles [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6e2a54eb-7217-4ab0-bbf4-b28f7cd40f8f/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1554.825786] env[69784]: DEBUG oslo_vmware.rw_handles [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Completed reading data from the image iterator. {{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1554.825984] env[69784]: DEBUG oslo_vmware.rw_handles [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6e2a54eb-7217-4ab0-bbf4-b28f7cd40f8f/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1555.132742] env[69784]: DEBUG oslo_vmware.api [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Task: {'id': task-3467154, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.083276} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.132989] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1555.133183] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1555.133348] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1555.133514] env[69784]: INFO nova.compute.manager [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Took 1.05 seconds to destroy the instance on the hypervisor. 
[ 1555.135673] env[69784]: DEBUG nova.compute.claims [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1555.135838] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1555.136060] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1555.352934] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fed1569-76d9-4e7e-aec5-441f5d559e56 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.360361] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-665c9623-86db-40dd-b90a-793257c21d45 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.388927] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8671d8a-96f0-4c0f-ba2f-d1268649faa9 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.395726] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b335c998-962a-4488-b714-0d4ca9a1118c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.408265] env[69784]: DEBUG nova.compute.provider_tree [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1555.417629] env[69784]: DEBUG nova.scheduler.client.report [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1555.432637] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 
tempest-ServerActionsTestOtherB-31001503-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.296s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1555.433108] env[69784]: ERROR nova.compute.manager [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1555.433108] env[69784]: Faults: ['InvalidArgument'] [ 1555.433108] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Traceback (most recent call last): [ 1555.433108] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1555.433108] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] self.driver.spawn(context, instance, image_meta, [ 1555.433108] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1555.433108] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1555.433108] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1555.433108] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] self._fetch_image_if_missing(context, vi) [ 1555.433108] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1555.433108] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] image_cache(vi, tmp_image_ds_loc) [ 1555.433108] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1555.433456] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] vm_util.copy_virtual_disk( [ 1555.433456] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1555.433456] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] session._wait_for_task(vmdk_copy_task) [ 1555.433456] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1555.433456] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] return self.wait_for_task(task_ref) [ 1555.433456] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1555.433456] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] return evt.wait() [ 1555.433456] env[69784]: ERROR nova.compute.manager [instance: 
6109a6f5-11ea-4983-b271-f84aa859d6cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1555.433456] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] result = hub.switch() [ 1555.433456] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1555.433456] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] return self.greenlet.switch() [ 1555.433456] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1555.433456] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] self.f(*self.args, **self.kw) [ 1555.433800] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1555.433800] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] raise exceptions.translate_fault(task_info.error) [ 1555.433800] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1555.433800] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Faults: ['InvalidArgument'] [ 1555.433800] env[69784]: ERROR nova.compute.manager [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] [ 1555.433800] env[69784]: DEBUG nova.compute.utils [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1555.435233] env[69784]: DEBUG nova.compute.manager [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Build of instance 6109a6f5-11ea-4983-b271-f84aa859d6cd was re-scheduled: A specified parameter was not correct: fileType [ 1555.435233] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1555.435622] env[69784]: DEBUG nova.compute.manager [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1555.435817] env[69784]: DEBUG nova.compute.manager [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1555.435988] env[69784]: DEBUG nova.compute.manager [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1555.436168] env[69784]: DEBUG nova.network.neutron [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1555.744057] env[69784]: DEBUG oslo_concurrency.lockutils [None req-33150eef-d64f-406d-8ffc-49625c424391 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquiring lock "85bc1bfa-46dd-4344-8fe5-a035a9574d02" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1555.744333] env[69784]: DEBUG oslo_concurrency.lockutils [None req-33150eef-d64f-406d-8ffc-49625c424391 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "85bc1bfa-46dd-4344-8fe5-a035a9574d02" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1555.910312] env[69784]: DEBUG nova.network.neutron [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1555.926792] env[69784]: INFO nova.compute.manager [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Took 0.49 seconds to deallocate network for instance. 
[ 1556.021212] env[69784]: INFO nova.scheduler.client.report [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Deleted allocations for instance 6109a6f5-11ea-4983-b271-f84aa859d6cd [ 1556.043623] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a0e96c70-ee6b-4751-ae6b-a88b37e3b0f6 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Lock "6109a6f5-11ea-4983-b271-f84aa859d6cd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 632.407s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1556.045017] env[69784]: DEBUG oslo_concurrency.lockutils [None req-33c80ab5-9ef5-4ef3-8639-c003d6db0f68 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Lock "6109a6f5-11ea-4983-b271-f84aa859d6cd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 434.762s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1556.045017] env[69784]: DEBUG oslo_concurrency.lockutils [None req-33c80ab5-9ef5-4ef3-8639-c003d6db0f68 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Acquiring lock "6109a6f5-11ea-4983-b271-f84aa859d6cd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1556.045198] env[69784]: DEBUG oslo_concurrency.lockutils [None req-33c80ab5-9ef5-4ef3-8639-c003d6db0f68 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Lock "6109a6f5-11ea-4983-b271-f84aa859d6cd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1556.045246] env[69784]: DEBUG oslo_concurrency.lockutils [None req-33c80ab5-9ef5-4ef3-8639-c003d6db0f68 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Lock "6109a6f5-11ea-4983-b271-f84aa859d6cd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1556.047143] env[69784]: INFO nova.compute.manager [None req-33c80ab5-9ef5-4ef3-8639-c003d6db0f68 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Terminating instance [ 1556.048848] env[69784]: DEBUG nova.compute.manager [None req-33c80ab5-9ef5-4ef3-8639-c003d6db0f68 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Start destroying the instance on the hypervisor. 
{{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1556.049056] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-33c80ab5-9ef5-4ef3-8639-c003d6db0f68 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1556.049557] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e0a77aa7-8da5-44bf-810d-d4546c4a2842 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.057472] env[69784]: DEBUG nova.compute.manager [None req-7053077b-65d1-4ed1-8db2-065893f4d97f tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 45611c65-5ce7-4e31-add1-29d5b9d87e5a] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1556.063971] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2afd280e-c906-4bb4-a96a-b28ef57ae719 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.081632] env[69784]: DEBUG nova.compute.manager [None req-7053077b-65d1-4ed1-8db2-065893f4d97f tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 45611c65-5ce7-4e31-add1-29d5b9d87e5a] Instance disappeared before build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1556.093342] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-33c80ab5-9ef5-4ef3-8639-c003d6db0f68 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6109a6f5-11ea-4983-b271-f84aa859d6cd could not be found. [ 1556.093542] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-33c80ab5-9ef5-4ef3-8639-c003d6db0f68 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1556.093718] env[69784]: INFO nova.compute.manager [None req-33c80ab5-9ef5-4ef3-8639-c003d6db0f68 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1556.093961] env[69784]: DEBUG oslo.service.loopingcall [None req-33c80ab5-9ef5-4ef3-8639-c003d6db0f68 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1556.094425] env[69784]: DEBUG nova.compute.manager [-] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1556.094519] env[69784]: DEBUG nova.network.neutron [-] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1556.111886] env[69784]: DEBUG oslo_concurrency.lockutils [None req-7053077b-65d1-4ed1-8db2-065893f4d97f tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Lock "45611c65-5ce7-4e31-add1-29d5b9d87e5a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 213.120s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1556.122569] env[69784]: DEBUG nova.compute.manager [None req-11194526-e975-47c7-ac62-b785ffaa1c2f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: f1af419b-6bc6-4ba5-9ec1-3011b3b055ef] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1556.126390] env[69784]: DEBUG nova.network.neutron [-] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1556.135842] env[69784]: INFO nova.compute.manager [-] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] Took 0.04 seconds to deallocate network for instance. [ 1556.152224] env[69784]: DEBUG nova.compute.manager [None req-11194526-e975-47c7-ac62-b785ffaa1c2f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: f1af419b-6bc6-4ba5-9ec1-3011b3b055ef] Instance disappeared before build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1556.172989] env[69784]: DEBUG oslo_concurrency.lockutils [None req-11194526-e975-47c7-ac62-b785ffaa1c2f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "f1af419b-6bc6-4ba5-9ec1-3011b3b055ef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 197.620s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1556.186513] env[69784]: DEBUG nova.compute.manager [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Starting instance... 
{{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1556.225575] env[69784]: DEBUG oslo_concurrency.lockutils [None req-33c80ab5-9ef5-4ef3-8639-c003d6db0f68 tempest-ServerActionsTestOtherB-31001503 tempest-ServerActionsTestOtherB-31001503-project-member] Lock "6109a6f5-11ea-4983-b271-f84aa859d6cd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.181s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1556.226437] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "6109a6f5-11ea-4983-b271-f84aa859d6cd" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 283.334s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1556.226630] env[69784]: INFO nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 6109a6f5-11ea-4983-b271-f84aa859d6cd] During sync_power_state the instance has a pending task (deleting). Skip. [ 1556.226803] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "6109a6f5-11ea-4983-b271-f84aa859d6cd" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1556.235590] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1556.235831] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1556.237347] env[69784]: INFO nova.compute.claims [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1556.454570] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1937b675-4dc2-4527-a602-d54bdcaf695d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.463167] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-402e7a7d-ea0f-4af5-aa56-f4e202293447 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.491463] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-654438c4-5d94-42e2-8e5a-18889814f0cc {{(pid=69784) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.497854] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d918305e-a55c-4009-8110-a2af03313eee {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.511142] env[69784]: DEBUG nova.compute.provider_tree [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1556.522351] env[69784]: DEBUG nova.scheduler.client.report [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1556.535494] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.300s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1556.535971] env[69784]: DEBUG nova.compute.manager [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Start building networks asynchronously for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1556.569055] env[69784]: DEBUG nova.compute.utils [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1556.570433] env[69784]: DEBUG nova.compute.manager [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Allocating IP information in the background. 
{{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1556.570581] env[69784]: DEBUG nova.network.neutron [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1556.578401] env[69784]: DEBUG nova.compute.manager [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1556.658521] env[69784]: DEBUG nova.compute.manager [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Start spawning the instance on the hypervisor. {{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1556.676472] env[69784]: DEBUG nova.policy [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f6552a441b39442db22371e84b909061', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a606bffcb6eb43c8a554793617386555', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 1556.685766] env[69784]: DEBUG nova.virt.hardware [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1556.686031] env[69784]: DEBUG nova.virt.hardware [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1556.686198] env[69784]: DEBUG nova.virt.hardware [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 
tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1556.686379] env[69784]: DEBUG nova.virt.hardware [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1556.686521] env[69784]: DEBUG nova.virt.hardware [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1556.686665] env[69784]: DEBUG nova.virt.hardware [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1556.686869] env[69784]: DEBUG nova.virt.hardware [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1556.687045] env[69784]: DEBUG nova.virt.hardware [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1556.687219] env[69784]: DEBUG nova.virt.hardware [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1556.687414] env[69784]: DEBUG nova.virt.hardware [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1556.687592] env[69784]: DEBUG nova.virt.hardware [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1556.688483] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73531217-ddba-4e09-9579-4ce3cd7d1c9c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.696931] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b654e17-0349-4887-b2fb-34d5e2f5625b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.304713] env[69784]: 
DEBUG nova.network.neutron [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Successfully created port: 3380ebd2-901e-4b17-8617-a98328107545 {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1557.940164] env[69784]: DEBUG nova.network.neutron [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Successfully updated port: 3380ebd2-901e-4b17-8617-a98328107545 {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1557.950295] env[69784]: DEBUG nova.compute.manager [req-75944e5b-d9ce-4025-ad1a-cda810f329a2 req-0dbd1640-b9d8-42b6-8276-2eb574c3b394 service nova] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Received event network-vif-plugged-3380ebd2-901e-4b17-8617-a98328107545 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1557.950545] env[69784]: DEBUG oslo_concurrency.lockutils [req-75944e5b-d9ce-4025-ad1a-cda810f329a2 req-0dbd1640-b9d8-42b6-8276-2eb574c3b394 service nova] Acquiring lock "db704361-31ad-49a0-8aa7-01d4e3f42a3d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1557.950760] env[69784]: DEBUG oslo_concurrency.lockutils [req-75944e5b-d9ce-4025-ad1a-cda810f329a2 req-0dbd1640-b9d8-42b6-8276-2eb574c3b394 service nova] Lock "db704361-31ad-49a0-8aa7-01d4e3f42a3d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1557.950998] env[69784]: DEBUG oslo_concurrency.lockutils [req-75944e5b-d9ce-4025-ad1a-cda810f329a2 req-0dbd1640-b9d8-42b6-8276-2eb574c3b394 service nova] Lock "db704361-31ad-49a0-8aa7-01d4e3f42a3d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1557.951103] env[69784]: DEBUG nova.compute.manager [req-75944e5b-d9ce-4025-ad1a-cda810f329a2 req-0dbd1640-b9d8-42b6-8276-2eb574c3b394 service nova] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] No waiting events found dispatching network-vif-plugged-3380ebd2-901e-4b17-8617-a98328107545 {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1557.951273] env[69784]: WARNING nova.compute.manager [req-75944e5b-d9ce-4025-ad1a-cda810f329a2 req-0dbd1640-b9d8-42b6-8276-2eb574c3b394 service nova] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Received unexpected event network-vif-plugged-3380ebd2-901e-4b17-8617-a98328107545 for instance with vm_state building and task_state spawning. 
[ 1557.952498] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Acquiring lock "refresh_cache-db704361-31ad-49a0-8aa7-01d4e3f42a3d" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1557.952639] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Acquired lock "refresh_cache-db704361-31ad-49a0-8aa7-01d4e3f42a3d" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1557.952783] env[69784]: DEBUG nova.network.neutron [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1557.991197] env[69784]: DEBUG nova.network.neutron [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Instance cache missing network info. {{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1558.188878] env[69784]: DEBUG nova.network.neutron [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Updating instance_info_cache with network_info: [{"id": "3380ebd2-901e-4b17-8617-a98328107545", "address": "fa:16:3e:08:1c:1c", "network": {"id": "20eb192d-4429-4b17-89d3-153745821175", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1252408980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a606bffcb6eb43c8a554793617386555", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3380ebd2-90", "ovs_interfaceid": "3380ebd2-901e-4b17-8617-a98328107545", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1558.201443] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Releasing lock "refresh_cache-db704361-31ad-49a0-8aa7-01d4e3f42a3d" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1558.201723] env[69784]: DEBUG 
nova.compute.manager [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Instance network_info: |[{"id": "3380ebd2-901e-4b17-8617-a98328107545", "address": "fa:16:3e:08:1c:1c", "network": {"id": "20eb192d-4429-4b17-89d3-153745821175", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1252408980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a606bffcb6eb43c8a554793617386555", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3380ebd2-90", "ovs_interfaceid": "3380ebd2-901e-4b17-8617-a98328107545", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1558.202113] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:1c:1c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a10c88d7-d13f-44fd-acee-7a734eb5f56a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3380ebd2-901e-4b17-8617-a98328107545', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1558.209404] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Creating folder: Project (a606bffcb6eb43c8a554793617386555). Parent ref: group-v692547. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1558.209934] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c5fafd33-c3a7-4613-9c94-b3985e00ad73 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.221352] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Created folder: Project (a606bffcb6eb43c8a554793617386555) in parent group-v692547. [ 1558.221535] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Creating folder: Instances. Parent ref: group-v692632. 
{{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1558.221749] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ecd95d3d-211c-4558-9687-eba45c939505 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.229552] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Created folder: Instances in parent group-v692632. [ 1558.229765] env[69784]: DEBUG oslo.service.loopingcall [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1558.229934] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1558.230137] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a8a86256-2195-45aa-b9bd-d6d78d9dd67b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.249514] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1558.249514] env[69784]: value = "task-3467157" [ 1558.249514] env[69784]: _type = "Task" [ 1558.249514] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.257109] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467157, 'name': CreateVM_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.760008] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467157, 'name': CreateVM_Task, 'duration_secs': 0.285256} completed successfully. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.760331] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1558.760930] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1558.761146] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1558.761511] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1558.761780] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc29fe9d-d4f5-46eb-8db7-d357aca3b38b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.766170] env[69784]: DEBUG oslo_vmware.api [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Waiting for the task: (returnval){ [ 1558.766170] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]529e3820-ffd2-6358-8a5b-bd4a360c07ba" [ 1558.766170] env[69784]: _type = "Task" [ 1558.766170] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.773911] env[69784]: DEBUG oslo_vmware.api [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]529e3820-ffd2-6358-8a5b-bd4a360c07ba, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.276070] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1559.276333] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1559.276544] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1559.978729] env[69784]: DEBUG nova.compute.manager [req-7face9ef-bc9e-4955-b037-cd1b28f79c11 req-bea6d6fe-288f-4bcf-8518-ae7c18f0f340 service nova] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Received event network-changed-3380ebd2-901e-4b17-8617-a98328107545 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1559.978998] env[69784]: DEBUG nova.compute.manager [req-7face9ef-bc9e-4955-b037-cd1b28f79c11 req-bea6d6fe-288f-4bcf-8518-ae7c18f0f340 service nova] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Refreshing instance network info cache due to event network-changed-3380ebd2-901e-4b17-8617-a98328107545. {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1559.979167] env[69784]: DEBUG oslo_concurrency.lockutils [req-7face9ef-bc9e-4955-b037-cd1b28f79c11 req-bea6d6fe-288f-4bcf-8518-ae7c18f0f340 service nova] Acquiring lock "refresh_cache-db704361-31ad-49a0-8aa7-01d4e3f42a3d" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1559.979345] env[69784]: DEBUG oslo_concurrency.lockutils [req-7face9ef-bc9e-4955-b037-cd1b28f79c11 req-bea6d6fe-288f-4bcf-8518-ae7c18f0f340 service nova] Acquired lock "refresh_cache-db704361-31ad-49a0-8aa7-01d4e3f42a3d" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1559.979517] env[69784]: DEBUG nova.network.neutron [req-7face9ef-bc9e-4955-b037-cd1b28f79c11 req-bea6d6fe-288f-4bcf-8518-ae7c18f0f340 service nova] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Refreshing network info cache for port 3380ebd2-901e-4b17-8617-a98328107545 {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1560.289525] env[69784]: DEBUG nova.network.neutron [req-7face9ef-bc9e-4955-b037-cd1b28f79c11 req-bea6d6fe-288f-4bcf-8518-ae7c18f0f340 service nova] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Updated VIF entry in instance network info cache for port 3380ebd2-901e-4b17-8617-a98328107545. 
{{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1560.289857] env[69784]: DEBUG nova.network.neutron [req-7face9ef-bc9e-4955-b037-cd1b28f79c11 req-bea6d6fe-288f-4bcf-8518-ae7c18f0f340 service nova] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Updating instance_info_cache with network_info: [{"id": "3380ebd2-901e-4b17-8617-a98328107545", "address": "fa:16:3e:08:1c:1c", "network": {"id": "20eb192d-4429-4b17-89d3-153745821175", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1252408980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a606bffcb6eb43c8a554793617386555", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3380ebd2-90", "ovs_interfaceid": "3380ebd2-901e-4b17-8617-a98328107545", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1560.298779] env[69784]: DEBUG oslo_concurrency.lockutils [req-7face9ef-bc9e-4955-b037-cd1b28f79c11 req-bea6d6fe-288f-4bcf-8518-ae7c18f0f340 service nova] Releasing lock "refresh_cache-db704361-31ad-49a0-8aa7-01d4e3f42a3d" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1576.849940] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1578.840025] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1578.840325] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69784) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1580.841048] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1581.840579] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1584.840676] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1585.840456] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1585.840659] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Starting heal instance info cache {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1585.840783] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Rebuilding the list of instances to heal {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1585.862787] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1585.862947] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1585.863270] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1585.863489] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1585.863635] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1585.863768] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Skipping network cache update for instance because it is Building. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1585.863894] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1585.864033] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1585.864166] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1585.864304] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1585.864430] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Didn't find any instances for network info cache update. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1585.864946] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1585.876011] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1585.876235] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1585.876401] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1585.876554] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69784) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1585.877975] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f67fbab-ee12-4e7a-9881-655ad29d7f55 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.886450] 
env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3431413d-e438-4f87-9f3d-12d6db5068a2 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.900338] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71fca5bf-67b0-42d0-8200-7b645a7cbd60 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.906826] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0927925a-9fce-4dce-9a49-f413c1b7b38d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.937755] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180930MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=69784) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1585.937944] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1585.938174] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1586.014489] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 694e2a62-5f2e-475d-9356-a66651c3e5e2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1586.014650] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance e6d05e25-386e-43d1-aec4-d62b9476891d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1586.014778] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 26793ea1-2934-4b30-8f8c-6beefe7046f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1586.014920] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 91546cc5-6e8b-4175-b256-ba19e98c22cc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1586.015146] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 1473585c-f194-4396-b568-e8c1bc6d049b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1586.015301] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 697cd7aa-d710-4e46-b241-085961a8631d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1586.015442] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 161991fb-77d5-4a18-b0f3-d2346c8d3b68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1586.015538] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1586.015654] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 93ea7e73-f280-4e22-9ac7-f1be9926a158 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1586.015784] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance db704361-31ad-49a0-8aa7-01d4e3f42a3d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1586.026862] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance a34a0620-ea85-4bd5-9690-c93d70ecb9ec has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1586.037217] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1586.047500] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 6d7f1207-ba9a-4d1c-9499-6c0677fd38ea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1586.057237] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1586.067665] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 75542f72-2ff8-44c7-90f2-b33c3391148e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1586.080434] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 85bc1bfa-46dd-4344-8fe5-a035a9574d02 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1586.080669] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1586.080823] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1586.287798] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc88ff51-b0a8-4c29-8541-ba97155a6903 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.295443] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3609478-0492-4386-bcaa-3471b3e78c2f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.324515] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bc94c09-fd0a-457a-92d8-a3539d728b07 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.331893] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9420c56b-353f-4e59-8a27-c883faae1e40 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.344595] env[69784]: DEBUG nova.compute.provider_tree [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1586.352910] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1586.366323] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69784) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1586.366503] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.428s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1587.361022] env[69784]: DEBUG oslo_service.periodic_task [None 
req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1588.840472] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1590.835112] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1602.625598] env[69784]: WARNING oslo_vmware.rw_handles [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1602.625598] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1602.625598] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1602.625598] env[69784]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1602.625598] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1602.625598] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 1602.625598] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1602.625598] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1602.625598] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1602.625598] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1602.625598] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1602.625598] env[69784]: ERROR oslo_vmware.rw_handles [ 1602.626348] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/6e2a54eb-7217-4ab0-bbf4-b28f7cd40f8f/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1602.628231] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1602.628522] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Copying Virtual Disk [datastore1] 
vmware_temp/6e2a54eb-7217-4ab0-bbf4-b28f7cd40f8f/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] vmware_temp/6e2a54eb-7217-4ab0-bbf4-b28f7cd40f8f/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1602.628865] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f02dbdad-8232-41eb-9c92-0de496606a23 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.638995] env[69784]: DEBUG oslo_vmware.api [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Waiting for the task: (returnval){ [ 1602.638995] env[69784]: value = "task-3467158" [ 1602.638995] env[69784]: _type = "Task" [ 1602.638995] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.648424] env[69784]: DEBUG oslo_vmware.api [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Task: {'id': task-3467158, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.150450] env[69784]: DEBUG oslo_vmware.exceptions [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Fault InvalidArgument not matched. {{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1603.150832] env[69784]: DEBUG oslo_concurrency.lockutils [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1603.151515] env[69784]: ERROR nova.compute.manager [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1603.151515] env[69784]: Faults: ['InvalidArgument'] [ 1603.151515] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Traceback (most recent call last): [ 1603.151515] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1603.151515] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] yield resources [ 1603.151515] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1603.151515] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] self.driver.spawn(context, instance, image_meta, [ 1603.151515] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] File 
"/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1603.151515] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1603.151515] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1603.151515] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] self._fetch_image_if_missing(context, vi) [ 1603.151515] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1603.151830] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] image_cache(vi, tmp_image_ds_loc) [ 1603.151830] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1603.151830] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] vm_util.copy_virtual_disk( [ 1603.151830] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1603.151830] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] session._wait_for_task(vmdk_copy_task) [ 1603.151830] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1603.151830] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] return self.wait_for_task(task_ref) [ 1603.151830] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1603.151830] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] return evt.wait() [ 1603.151830] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1603.151830] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] result = hub.switch() [ 1603.151830] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1603.151830] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] return self.greenlet.switch() [ 1603.152161] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1603.152161] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] self.f(*self.args, **self.kw) [ 1603.152161] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1603.152161] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] raise exceptions.translate_fault(task_info.error) [ 
1603.152161] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1603.152161] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Faults: ['InvalidArgument'] [ 1603.152161] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] [ 1603.152161] env[69784]: INFO nova.compute.manager [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Terminating instance [ 1603.153624] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1603.156022] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1603.156022] env[69784]: DEBUG nova.compute.manager [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Start destroying the instance on the hypervisor. 
{{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1603.156022] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1603.156022] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d58aa021-00ef-4d75-a86e-aa6e962a8b83 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.157417] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fc14b59-d28c-443c-840c-5677c16ee500 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.165169] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1603.166279] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-14afe56c-9598-464b-a451-e685b4bea737 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.167777] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1603.167969] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1603.168641] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd0f870d-4d42-4cca-a313-6fc071e33a97 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.174670] env[69784]: DEBUG oslo_vmware.api [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Waiting for the task: (returnval){ [ 1603.174670] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]5261ad11-5802-1ab6-3a44-9b50d1dc95d7" [ 1603.174670] env[69784]: _type = "Task" [ 1603.174670] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.182352] env[69784]: DEBUG oslo_vmware.api [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]5261ad11-5802-1ab6-3a44-9b50d1dc95d7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.241924] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1603.242192] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1603.242350] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Deleting the datastore file [datastore1] 694e2a62-5f2e-475d-9356-a66651c3e5e2 {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1603.242619] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-af85d0e1-772a-410f-9b46-9059d01e0d8c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.249083] env[69784]: DEBUG oslo_vmware.api [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Waiting for the task: (returnval){ [ 1603.249083] env[69784]: value = "task-3467160" [ 1603.249083] env[69784]: _type = "Task" [ 1603.249083] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.257013] env[69784]: DEBUG oslo_vmware.api [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Task: {'id': task-3467160, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.684285] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1603.684589] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Creating directory with path [datastore1] vmware_temp/6bf84af9-0aec-479a-a12a-ea79ffb84c0d/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1603.684770] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3a3b8a99-b404-49e1-b516-0e34e95f082b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.696224] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Created directory with path [datastore1] vmware_temp/6bf84af9-0aec-479a-a12a-ea79ffb84c0d/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1603.696424] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Fetch image to [datastore1] vmware_temp/6bf84af9-0aec-479a-a12a-ea79ffb84c0d/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1603.696593] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/6bf84af9-0aec-479a-a12a-ea79ffb84c0d/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1603.697382] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa1c8594-ca07-4462-ba56-03e7de107b62 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.703844] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0df8fcfa-53a0-4f07-8f28-3dd99e7f387b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.712916] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ff8a40-5d82-4355-b3e8-74a2efd274f5 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.742718] env[69784]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d0e993-7832-4081-bbe2-f67ebedb1553 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.748631] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5daa1732-9738-4e9a-b98c-1e894c1ed878 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.758518] env[69784]: DEBUG oslo_vmware.api [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Task: {'id': task-3467160, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082357} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.758733] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1603.758907] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1603.759080] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1603.759257] env[69784]: INFO nova.compute.manager [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1603.761280] env[69784]: DEBUG nova.compute.claims [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1603.761452] env[69784]: DEBUG oslo_concurrency.lockutils [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1603.761676] env[69784]: DEBUG oslo_concurrency.lockutils [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1603.772147] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1603.821767] env[69784]: DEBUG oslo_vmware.rw_handles [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6bf84af9-0aec-479a-a12a-ea79ffb84c0d/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1603.881134] env[69784]: DEBUG oslo_vmware.rw_handles [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Completed reading data from the image iterator. {{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1603.881134] env[69784]: DEBUG oslo_vmware.rw_handles [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6bf84af9-0aec-479a-a12a-ea79ffb84c0d/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1604.034755] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dda7ffd-709d-4f4f-9751-0dda677efe4d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.041986] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2917b3eb-8eb2-4d6a-b272-e0e9d74fc27d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.071365] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb577cac-fbde-4548-8943-cae566643a38 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.079220] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8733405-8500-4300-8338-75ef3197e67f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.092297] env[69784]: DEBUG nova.compute.provider_tree [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1604.100688] env[69784]: DEBUG nova.scheduler.client.report [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1604.113875] env[69784]: DEBUG oslo_concurrency.lockutils [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.352s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1604.114413] env[69784]: ERROR nova.compute.manager [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1604.114413] env[69784]: Faults: ['InvalidArgument'] [ 1604.114413] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Traceback (most recent call last): [ 1604.114413] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1604.114413] env[69784]: ERROR 
nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] self.driver.spawn(context, instance, image_meta, [ 1604.114413] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1604.114413] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1604.114413] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1604.114413] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] self._fetch_image_if_missing(context, vi) [ 1604.114413] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1604.114413] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] image_cache(vi, tmp_image_ds_loc) [ 1604.114413] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1604.114791] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] vm_util.copy_virtual_disk( [ 1604.114791] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1604.114791] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] session._wait_for_task(vmdk_copy_task) [ 1604.114791] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1604.114791] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] return self.wait_for_task(task_ref) [ 1604.114791] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1604.114791] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] return evt.wait() [ 1604.114791] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1604.114791] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] result = hub.switch() [ 1604.114791] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1604.114791] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] return self.greenlet.switch() [ 1604.114791] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1604.114791] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] self.f(*self.args, **self.kw) [ 1604.115111] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1604.115111] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] raise exceptions.translate_fault(task_info.error) [ 1604.115111] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1604.115111] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Faults: ['InvalidArgument'] [ 1604.115111] env[69784]: ERROR nova.compute.manager [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] [ 1604.115111] env[69784]: DEBUG nova.compute.utils [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1604.116512] env[69784]: DEBUG nova.compute.manager [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Build of instance 694e2a62-5f2e-475d-9356-a66651c3e5e2 was re-scheduled: A specified parameter was not correct: fileType [ 1604.116512] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1604.116878] env[69784]: DEBUG nova.compute.manager [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1604.117059] env[69784]: DEBUG nova.compute.manager [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1604.117231] env[69784]: DEBUG nova.compute.manager [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1604.117428] env[69784]: DEBUG nova.network.neutron [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1604.493928] env[69784]: DEBUG nova.network.neutron [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1604.506741] env[69784]: INFO nova.compute.manager [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Took 0.39 seconds to deallocate network for instance. [ 1604.599059] env[69784]: INFO nova.scheduler.client.report [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Deleted allocations for instance 694e2a62-5f2e-475d-9356-a66651c3e5e2 [ 1604.621693] env[69784]: DEBUG oslo_concurrency.lockutils [None req-0330ec23-bf3a-4bc9-a170-3396a3cc28bb tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "694e2a62-5f2e-475d-9356-a66651c3e5e2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 639.959s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1604.622897] env[69784]: DEBUG oslo_concurrency.lockutils [None req-75e733a1-5e82-45ff-aa5d-97a27041c255 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "694e2a62-5f2e-475d-9356-a66651c3e5e2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 443.301s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1604.623081] env[69784]: DEBUG oslo_concurrency.lockutils [None req-75e733a1-5e82-45ff-aa5d-97a27041c255 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquiring lock "694e2a62-5f2e-475d-9356-a66651c3e5e2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1604.623288] env[69784]: DEBUG oslo_concurrency.lockutils [None req-75e733a1-5e82-45ff-aa5d-97a27041c255 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "694e2a62-5f2e-475d-9356-a66651c3e5e2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69784) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1604.623459] env[69784]: DEBUG oslo_concurrency.lockutils [None req-75e733a1-5e82-45ff-aa5d-97a27041c255 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "694e2a62-5f2e-475d-9356-a66651c3e5e2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1604.625413] env[69784]: INFO nova.compute.manager [None req-75e733a1-5e82-45ff-aa5d-97a27041c255 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Terminating instance [ 1604.627030] env[69784]: DEBUG oslo_concurrency.lockutils [None req-75e733a1-5e82-45ff-aa5d-97a27041c255 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquiring lock "refresh_cache-694e2a62-5f2e-475d-9356-a66651c3e5e2" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1604.627188] env[69784]: DEBUG oslo_concurrency.lockutils [None req-75e733a1-5e82-45ff-aa5d-97a27041c255 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquired lock "refresh_cache-694e2a62-5f2e-475d-9356-a66651c3e5e2" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1604.627396] env[69784]: DEBUG nova.network.neutron [None req-75e733a1-5e82-45ff-aa5d-97a27041c255 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1604.632826] env[69784]: DEBUG nova.compute.manager [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1604.660069] env[69784]: DEBUG nova.network.neutron [None req-75e733a1-5e82-45ff-aa5d-97a27041c255 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Instance cache missing network info. 
{{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1604.685263] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1604.685569] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1604.687304] env[69784]: INFO nova.compute.claims [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1604.822940] env[69784]: DEBUG nova.network.neutron [None req-75e733a1-5e82-45ff-aa5d-97a27041c255 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1604.833366] env[69784]: DEBUG oslo_concurrency.lockutils [None req-75e733a1-5e82-45ff-aa5d-97a27041c255 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Releasing lock "refresh_cache-694e2a62-5f2e-475d-9356-a66651c3e5e2" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1604.833753] env[69784]: DEBUG nova.compute.manager [None req-75e733a1-5e82-45ff-aa5d-97a27041c255 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Start destroying the instance on the hypervisor. 
{{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1604.833936] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-75e733a1-5e82-45ff-aa5d-97a27041c255 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1604.834510] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cdb24f66-319d-4899-88d1-fbf00517e860 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.845904] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5ba8414-a260-4bc4-87ab-33bbac5f9efd {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.876746] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-75e733a1-5e82-45ff-aa5d-97a27041c255 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 694e2a62-5f2e-475d-9356-a66651c3e5e2 could not be found. [ 1604.876954] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-75e733a1-5e82-45ff-aa5d-97a27041c255 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1604.877148] env[69784]: INFO nova.compute.manager [None req-75e733a1-5e82-45ff-aa5d-97a27041c255 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1604.877421] env[69784]: DEBUG oslo.service.loopingcall [None req-75e733a1-5e82-45ff-aa5d-97a27041c255 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1604.879725] env[69784]: DEBUG nova.compute.manager [-] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1604.879873] env[69784]: DEBUG nova.network.neutron [-] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1604.897111] env[69784]: DEBUG nova.network.neutron [-] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Instance cache missing network info. {{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1604.904824] env[69784]: DEBUG nova.network.neutron [-] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1604.916369] env[69784]: INFO nova.compute.manager [-] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] Took 0.04 seconds to deallocate network for instance. 
[ 1604.957245] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17e96c9f-969a-4729-81fd-10383a232938 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.969875] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fce636b-62e0-4442-b2e4-2e5d5afb7d83 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.003713] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bd4f9cd-3875-40fd-b4f8-0e5a0a928518 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.016697] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62f81799-3964-4d9f-8e4e-eb8dc1f903e0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.024273] env[69784]: DEBUG oslo_concurrency.lockutils [None req-75e733a1-5e82-45ff-aa5d-97a27041c255 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "694e2a62-5f2e-475d-9356-a66651c3e5e2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.401s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1605.025170] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "694e2a62-5f2e-475d-9356-a66651c3e5e2" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 332.132s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1605.025433] env[69784]: INFO nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 694e2a62-5f2e-475d-9356-a66651c3e5e2] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1605.026534] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "694e2a62-5f2e-475d-9356-a66651c3e5e2" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1605.035475] env[69784]: DEBUG nova.compute.provider_tree [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1605.044022] env[69784]: DEBUG nova.scheduler.client.report [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1605.057878] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.372s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1605.058616] env[69784]: DEBUG nova.compute.manager [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Start building networks asynchronously for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1605.092381] env[69784]: DEBUG nova.compute.utils [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1605.093587] env[69784]: DEBUG nova.compute.manager [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Allocating IP information in the background. 
{{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1605.093761] env[69784]: DEBUG nova.network.neutron [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1605.105476] env[69784]: DEBUG nova.compute.manager [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1605.154159] env[69784]: DEBUG nova.policy [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '96c78048ebe64c29ba419a8bc9c1d12b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8f25ef73b5b44c47b4ff56130ab8052e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 1605.178049] env[69784]: DEBUG nova.compute.manager [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Start spawning the instance on the hypervisor. 
{{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1605.204381] env[69784]: DEBUG nova.virt.hardware [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1605.204724] env[69784]: DEBUG nova.virt.hardware [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1605.204888] env[69784]: DEBUG nova.virt.hardware [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1605.205185] env[69784]: DEBUG nova.virt.hardware [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1605.205360] env[69784]: DEBUG nova.virt.hardware [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1605.205516] env[69784]: DEBUG nova.virt.hardware [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1605.205725] env[69784]: DEBUG nova.virt.hardware [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1605.205885] env[69784]: DEBUG nova.virt.hardware [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1605.206067] env[69784]: DEBUG nova.virt.hardware [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a 
tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1605.206238] env[69784]: DEBUG nova.virt.hardware [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1605.206415] env[69784]: DEBUG nova.virt.hardware [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1605.207259] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbee33ce-9360-45a7-9947-036ffccafac6 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.215544] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1d055da-beed-4f4a-a354-c956d8499b0b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.552370] env[69784]: DEBUG nova.network.neutron [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Successfully created port: eac088b8-317f-4525-8fdc-77ee7d8b4a3b {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1606.436133] env[69784]: DEBUG nova.network.neutron [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Successfully updated port: eac088b8-317f-4525-8fdc-77ee7d8b4a3b {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1606.451413] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Acquiring lock "refresh_cache-a34a0620-ea85-4bd5-9690-c93d70ecb9ec" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1606.451609] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Acquired lock "refresh_cache-a34a0620-ea85-4bd5-9690-c93d70ecb9ec" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1606.451788] env[69784]: DEBUG nova.network.neutron [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1606.487585] env[69784]: DEBUG nova.network.neutron [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] 
Instance cache missing network info. {{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1606.536483] env[69784]: DEBUG nova.compute.manager [req-e0dde11c-e365-4bb6-bd1d-e953797411c8 req-e0021dc8-5e5b-4618-a5d0-6bd36b38e701 service nova] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Received event network-vif-plugged-eac088b8-317f-4525-8fdc-77ee7d8b4a3b {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1606.536483] env[69784]: DEBUG oslo_concurrency.lockutils [req-e0dde11c-e365-4bb6-bd1d-e953797411c8 req-e0021dc8-5e5b-4618-a5d0-6bd36b38e701 service nova] Acquiring lock "a34a0620-ea85-4bd5-9690-c93d70ecb9ec-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1606.536483] env[69784]: DEBUG oslo_concurrency.lockutils [req-e0dde11c-e365-4bb6-bd1d-e953797411c8 req-e0021dc8-5e5b-4618-a5d0-6bd36b38e701 service nova] Lock "a34a0620-ea85-4bd5-9690-c93d70ecb9ec-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1606.536483] env[69784]: DEBUG oslo_concurrency.lockutils [req-e0dde11c-e365-4bb6-bd1d-e953797411c8 req-e0021dc8-5e5b-4618-a5d0-6bd36b38e701 service nova] Lock "a34a0620-ea85-4bd5-9690-c93d70ecb9ec-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1606.536700] env[69784]: DEBUG nova.compute.manager [req-e0dde11c-e365-4bb6-bd1d-e953797411c8 req-e0021dc8-5e5b-4618-a5d0-6bd36b38e701 service nova] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] No waiting events found dispatching network-vif-plugged-eac088b8-317f-4525-8fdc-77ee7d8b4a3b {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1606.536892] env[69784]: WARNING nova.compute.manager [req-e0dde11c-e365-4bb6-bd1d-e953797411c8 req-e0021dc8-5e5b-4618-a5d0-6bd36b38e701 service nova] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Received unexpected event network-vif-plugged-eac088b8-317f-4525-8fdc-77ee7d8b4a3b for instance with vm_state building and task_state spawning. [ 1606.537111] env[69784]: DEBUG nova.compute.manager [req-e0dde11c-e365-4bb6-bd1d-e953797411c8 req-e0021dc8-5e5b-4618-a5d0-6bd36b38e701 service nova] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Received event network-changed-eac088b8-317f-4525-8fdc-77ee7d8b4a3b {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1606.537309] env[69784]: DEBUG nova.compute.manager [req-e0dde11c-e365-4bb6-bd1d-e953797411c8 req-e0021dc8-5e5b-4618-a5d0-6bd36b38e701 service nova] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Refreshing instance network info cache due to event network-changed-eac088b8-317f-4525-8fdc-77ee7d8b4a3b. 
{{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1606.537526] env[69784]: DEBUG oslo_concurrency.lockutils [req-e0dde11c-e365-4bb6-bd1d-e953797411c8 req-e0021dc8-5e5b-4618-a5d0-6bd36b38e701 service nova] Acquiring lock "refresh_cache-a34a0620-ea85-4bd5-9690-c93d70ecb9ec" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1606.676338] env[69784]: DEBUG nova.network.neutron [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Updating instance_info_cache with network_info: [{"id": "eac088b8-317f-4525-8fdc-77ee7d8b4a3b", "address": "fa:16:3e:0e:f0:81", "network": {"id": "97b6fcf7-760e-4495-8643-3b7e2885d04a", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1114649976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f25ef73b5b44c47b4ff56130ab8052e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52c1f5eb-3d4a-4faa-a30d-2b0a46430791", "external-id": "nsx-vlan-transportzone-775", "segmentation_id": 775, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeac088b8-31", "ovs_interfaceid": "eac088b8-317f-4525-8fdc-77ee7d8b4a3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1606.689199] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Releasing lock "refresh_cache-a34a0620-ea85-4bd5-9690-c93d70ecb9ec" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1606.689495] env[69784]: DEBUG nova.compute.manager [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Instance network_info: |[{"id": "eac088b8-317f-4525-8fdc-77ee7d8b4a3b", "address": "fa:16:3e:0e:f0:81", "network": {"id": "97b6fcf7-760e-4495-8643-3b7e2885d04a", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1114649976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f25ef73b5b44c47b4ff56130ab8052e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52c1f5eb-3d4a-4faa-a30d-2b0a46430791", "external-id": "nsx-vlan-transportzone-775", "segmentation_id": 775, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tapeac088b8-31", "ovs_interfaceid": "eac088b8-317f-4525-8fdc-77ee7d8b4a3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1606.689789] env[69784]: DEBUG oslo_concurrency.lockutils [req-e0dde11c-e365-4bb6-bd1d-e953797411c8 req-e0021dc8-5e5b-4618-a5d0-6bd36b38e701 service nova] Acquired lock "refresh_cache-a34a0620-ea85-4bd5-9690-c93d70ecb9ec" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1606.689970] env[69784]: DEBUG nova.network.neutron [req-e0dde11c-e365-4bb6-bd1d-e953797411c8 req-e0021dc8-5e5b-4618-a5d0-6bd36b38e701 service nova] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Refreshing network info cache for port eac088b8-317f-4525-8fdc-77ee7d8b4a3b {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1606.690968] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:f0:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '52c1f5eb-3d4a-4faa-a30d-2b0a46430791', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eac088b8-317f-4525-8fdc-77ee7d8b4a3b', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1606.699017] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Creating folder: Project (8f25ef73b5b44c47b4ff56130ab8052e). Parent ref: group-v692547. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1606.699893] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4e643fe2-b8cb-4773-a5c3-337ebe5ea9c4 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.716835] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Created folder: Project (8f25ef73b5b44c47b4ff56130ab8052e) in parent group-v692547. [ 1606.717019] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Creating folder: Instances. Parent ref: group-v692635. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1606.717239] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0faf4bde-ea38-4a8e-b68d-bd085483e954 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.725689] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Created folder: Instances in parent group-v692635. 
[ 1606.725928] env[69784]: DEBUG oslo.service.loopingcall [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1606.726148] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1606.726349] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-12557c8b-8c4a-4354-ab6b-3bbad36f1bae {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.747334] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1606.747334] env[69784]: value = "task-3467163" [ 1606.747334] env[69784]: _type = "Task" [ 1606.747334] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.754497] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467163, 'name': CreateVM_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.956941] env[69784]: DEBUG nova.network.neutron [req-e0dde11c-e365-4bb6-bd1d-e953797411c8 req-e0021dc8-5e5b-4618-a5d0-6bd36b38e701 service nova] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Updated VIF entry in instance network info cache for port eac088b8-317f-4525-8fdc-77ee7d8b4a3b. {{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1606.957342] env[69784]: DEBUG nova.network.neutron [req-e0dde11c-e365-4bb6-bd1d-e953797411c8 req-e0021dc8-5e5b-4618-a5d0-6bd36b38e701 service nova] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Updating instance_info_cache with network_info: [{"id": "eac088b8-317f-4525-8fdc-77ee7d8b4a3b", "address": "fa:16:3e:0e:f0:81", "network": {"id": "97b6fcf7-760e-4495-8643-3b7e2885d04a", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1114649976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f25ef73b5b44c47b4ff56130ab8052e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52c1f5eb-3d4a-4faa-a30d-2b0a46430791", "external-id": "nsx-vlan-transportzone-775", "segmentation_id": 775, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeac088b8-31", "ovs_interfaceid": "eac088b8-317f-4525-8fdc-77ee7d8b4a3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1606.967308] env[69784]: DEBUG oslo_concurrency.lockutils [req-e0dde11c-e365-4bb6-bd1d-e953797411c8 req-e0021dc8-5e5b-4618-a5d0-6bd36b38e701 service nova] Releasing lock "refresh_cache-a34a0620-ea85-4bd5-9690-c93d70ecb9ec" {{(pid=69784) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1607.258158] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467163, 'name': CreateVM_Task, 'duration_secs': 0.266977} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.258343] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1607.258999] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1607.259222] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1607.259572] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1607.259822] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ba2f7d0-32fc-4a32-8085-451bef21f360 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.264658] env[69784]: DEBUG oslo_vmware.api [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Waiting for the task: (returnval){ [ 1607.264658] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]521ce7cc-3e50-9551-6da3-22d78f933ce2" [ 1607.264658] env[69784]: _type = "Task" [ 1607.264658] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.272590] env[69784]: DEBUG oslo_vmware.api [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]521ce7cc-3e50-9551-6da3-22d78f933ce2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.774704] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1607.775035] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1607.775186] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1616.148071] env[69784]: DEBUG oslo_concurrency.lockutils [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Acquiring lock "7632e563-1790-442f-9e13-77f3d93e4223" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1616.148071] env[69784]: DEBUG oslo_concurrency.lockutils [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Lock "7632e563-1790-442f-9e13-77f3d93e4223" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1621.173995] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Acquiring lock "871e3b73-d4d8-4081-8b92-0dee212d8961" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1621.174314] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Lock "871e3b73-d4d8-4081-8b92-0dee212d8961" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1638.839709] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1640.840628] env[69784]: DEBUG 
oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1640.841894] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69784) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1641.841371] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1643.841360] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1644.840623] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1645.840292] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1645.840680] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Starting heal instance info cache {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1645.840680] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Rebuilding the list of instances to heal {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1645.864786] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1645.864967] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1645.865096] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1645.865226] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Skipping network cache update for instance because it is Building. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1645.865349] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1645.865470] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1645.865590] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1645.865743] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1645.865911] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1645.866049] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1645.866173] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Didn't find any instances for network info cache update. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1645.866719] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1645.877541] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1645.877802] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1645.877944] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1645.878113] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69784) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1645.879187] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abbd8274-07a3-40c7-a493-aa11c50747fa {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.888082] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55498e9a-2cbf-4572-acee-9f556a22e35c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.903277] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-782daf3f-bc19-4e88-bfeb-cc88bbee3798 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.909496] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0104b9e9-5147-428d-8be2-674cb9a4a43b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.937699] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180958MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=69784) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1645.937887] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 
1645.938083] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1646.009850] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance e6d05e25-386e-43d1-aec4-d62b9476891d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1646.010031] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 26793ea1-2934-4b30-8f8c-6beefe7046f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1646.010162] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 91546cc5-6e8b-4175-b256-ba19e98c22cc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1646.010285] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 1473585c-f194-4396-b568-e8c1bc6d049b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1646.010403] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 697cd7aa-d710-4e46-b241-085961a8631d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1646.010519] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 161991fb-77d5-4a18-b0f3-d2346c8d3b68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1646.010638] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1646.010750] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 93ea7e73-f280-4e22-9ac7-f1be9926a158 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1646.010863] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance db704361-31ad-49a0-8aa7-01d4e3f42a3d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1646.011053] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance a34a0620-ea85-4bd5-9690-c93d70ecb9ec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1646.021850] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1646.032840] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 6d7f1207-ba9a-4d1c-9499-6c0677fd38ea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1646.042776] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1646.053504] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 75542f72-2ff8-44c7-90f2-b33c3391148e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1646.062106] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 85bc1bfa-46dd-4344-8fe5-a035a9574d02 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1646.071856] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 7632e563-1790-442f-9e13-77f3d93e4223 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1646.081327] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 871e3b73-d4d8-4081-8b92-0dee212d8961 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1646.081544] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1646.081691] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1646.277550] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26caef63-c4a4-4e8c-af1e-8d4711766900 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.285206] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fee1d141-0787-4c57-a209-46851a6a0a80 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.315483] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90432542-2ed6-460c-8ec9-a5af83e410b0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.322753] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a33e5e0b-4e30-4635-a329-861dbcd7cea1 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.335800] env[69784]: DEBUG nova.compute.provider_tree [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1646.344093] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1646.358465] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69784) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1646.358465] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.420s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1648.353693] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1649.840490] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1652.643310] env[69784]: WARNING oslo_vmware.rw_handles [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1652.643310] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1652.643310] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1652.643310] env[69784]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1652.643310] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1652.643310] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 1652.643310] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1652.643310] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1652.643310] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1652.643310] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1652.643310] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1652.643310] env[69784]: ERROR oslo_vmware.rw_handles [ 1652.643961] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/6bf84af9-0aec-479a-a12a-ea79ffb84c0d/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store 
datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1652.646085] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1652.646214] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Copying Virtual Disk [datastore1] vmware_temp/6bf84af9-0aec-479a-a12a-ea79ffb84c0d/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] vmware_temp/6bf84af9-0aec-479a-a12a-ea79ffb84c0d/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1652.647011] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-67043075-0f25-4dda-88a2-f444eca0f909 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.654614] env[69784]: DEBUG oslo_vmware.api [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Waiting for the task: (returnval){ [ 1652.654614] env[69784]: value = "task-3467164" [ 1652.654614] env[69784]: _type = "Task" [ 1652.654614] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.662243] env[69784]: DEBUG oslo_vmware.api [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Task: {'id': task-3467164, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.165169] env[69784]: DEBUG oslo_vmware.exceptions [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Fault InvalidArgument not matched. 
{{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1653.165476] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1653.166066] env[69784]: ERROR nova.compute.manager [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1653.166066] env[69784]: Faults: ['InvalidArgument'] [ 1653.166066] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Traceback (most recent call last): [ 1653.166066] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1653.166066] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] yield resources [ 1653.166066] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1653.166066] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] self.driver.spawn(context, instance, image_meta, [ 1653.166066] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1653.166066] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1653.166066] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1653.166066] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] self._fetch_image_if_missing(context, vi) [ 1653.166066] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1653.166473] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] image_cache(vi, tmp_image_ds_loc) [ 1653.166473] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1653.166473] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] vm_util.copy_virtual_disk( [ 1653.166473] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1653.166473] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] session._wait_for_task(vmdk_copy_task) [ 1653.166473] env[69784]: ERROR nova.compute.manager [instance: 
e6d05e25-386e-43d1-aec4-d62b9476891d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1653.166473] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] return self.wait_for_task(task_ref) [ 1653.166473] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1653.166473] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] return evt.wait() [ 1653.166473] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1653.166473] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] result = hub.switch() [ 1653.166473] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1653.166473] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] return self.greenlet.switch() [ 1653.166848] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1653.166848] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] self.f(*self.args, **self.kw) [ 1653.166848] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1653.166848] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] raise exceptions.translate_fault(task_info.error) [ 1653.166848] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1653.166848] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Faults: ['InvalidArgument'] [ 1653.166848] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] [ 1653.166848] env[69784]: INFO nova.compute.manager [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Terminating instance [ 1653.167934] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1653.168793] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1653.169100] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-47035101-4ae6-4a3e-963e-e72f796fce31 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.171330] env[69784]: DEBUG nova.compute.manager [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1653.171526] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1653.172272] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-743c4059-a20c-40fe-81db-02af633a5316 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.179457] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1653.180479] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d004b380-218c-4129-997e-4e37aad62dae {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.181915] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1653.182102] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1653.182768] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-571a5d02-3ba1-46c3-8d83-b50f178b8eb3 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.187489] env[69784]: DEBUG oslo_vmware.api [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Waiting for the task: (returnval){ [ 1653.187489] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]527c2685-bbee-cdb3-3904-1bf844d444d1" [ 1653.187489] env[69784]: _type = "Task" [ 1653.187489] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.194820] env[69784]: DEBUG oslo_vmware.api [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]527c2685-bbee-cdb3-3904-1bf844d444d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.249549] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1653.249824] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1653.250032] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Deleting the datastore file [datastore1] e6d05e25-386e-43d1-aec4-d62b9476891d {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1653.250323] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e07a35ea-9d0c-4f51-b846-6024b007bd2d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.256787] env[69784]: DEBUG oslo_vmware.api [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Waiting for the task: (returnval){ [ 1653.256787] env[69784]: value = "task-3467166" [ 1653.256787] env[69784]: _type = "Task" [ 1653.256787] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.264545] env[69784]: DEBUG oslo_vmware.api [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Task: {'id': task-3467166, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.697582] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1653.697896] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Creating directory with path [datastore1] vmware_temp/5bb425dd-2638-4186-b460-800f3bca2732/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1653.698255] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-75f3a68b-05d2-4a61-94ba-92786fe411a2 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.709968] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Created directory with path [datastore1] vmware_temp/5bb425dd-2638-4186-b460-800f3bca2732/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1653.710179] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Fetch image to [datastore1] vmware_temp/5bb425dd-2638-4186-b460-800f3bca2732/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1653.710366] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/5bb425dd-2638-4186-b460-800f3bca2732/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1653.711141] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b3e55ee-2487-4a6a-811f-1ac4a496cd3f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.717920] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ae6affa-26de-41ea-9da4-489fae3912d1 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.728423] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf3ec245-00a0-4cc1-9326-8a243ce85117 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.761903] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26741f5f-3ca3-49f3-925e-c5e6af61cbe0 {{(pid=69784) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.769730] env[69784]: DEBUG oslo_vmware.api [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Task: {'id': task-3467166, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075611} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1653.770723] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1653.770923] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1653.771113] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1653.771289] env[69784]: INFO nova.compute.manager [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Took 0.60 seconds to destroy the instance on the hypervisor. 
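The trace above repeats one oslo.vmware pattern over and over: a vCenter *_Task method is invoked through the API session (CopyVirtualDisk_Task, DeleteDatastoreFile_Task), then wait_for_task() polls the task until it either completes ("completed successfully") or raises a translated fault such as the InvalidArgument "fileType" error. The snippet below is a minimal illustrative sketch of that invoke-and-poll pattern, not Nova's actual code; the vCenter address, credentials and .vmdk paths are placeholders.

    # Minimal sketch of the oslo.vmware invoke/poll pattern shown in the log.
    # The host, credentials, and datastore paths are placeholders only.
    from oslo_vmware import api, exceptions

    session = api.VMwareAPISession(
        'vc.example.org', 'user', 'secret',          # placeholder endpoint
        api_retry_count=10, task_poll_interval=0.5)   # knobs Nova also exposes

    try:
        # Comparable in spirit to the CopyVirtualDisk_Task call in the trace;
        # real code obtains the managed object refs via PropertyCollector.
        task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task',
            session.vim.service_content.virtualDiskManager,
            sourceName='[datastore1] vmware_temp/example/tmp-sparse.vmdk',
            destName='[datastore1] vmware_temp/example/example.vmdk')
        session.wait_for_task(task)   # polls progress, raises on task error
    except exceptions.VimFaultException as err:
        # e.g. "A specified parameter was not correct: fileType"
        print('copy failed: %s (faults=%s)' % (err, err.fault_list))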
[ 1653.773336] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-3f482aca-d5c5-4c4e-accd-aad6a64bee0b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.775307] env[69784]: DEBUG nova.compute.claims [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1653.775481] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1653.775697] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1653.796898] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1653.850308] env[69784]: DEBUG oslo_vmware.rw_handles [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5bb425dd-2638-4186-b460-800f3bca2732/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1653.912166] env[69784]: DEBUG oslo_vmware.rw_handles [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Completed reading data from the image iterator. {{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1653.912369] env[69784]: DEBUG oslo_vmware.rw_handles [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5bb425dd-2638-4186-b460-800f3bca2732/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1654.074528] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-866a6dd9-1bb6-430f-9e65-16f0883de201 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.082288] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a7939d2-b482-4189-b23f-6572de1eb609 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.112747] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73cadc7c-a964-4dcf-b3da-201355e176a3 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.119400] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9077c8ce-1216-48f9-95af-121751033614 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.132008] env[69784]: DEBUG nova.compute.provider_tree [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1654.139985] env[69784]: DEBUG nova.scheduler.client.report [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1654.155706] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.380s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1654.156351] env[69784]: ERROR nova.compute.manager [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1654.156351] env[69784]: Faults: ['InvalidArgument'] [ 1654.156351] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Traceback (most recent call last): [ 1654.156351] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] File 
"/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1654.156351] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] self.driver.spawn(context, instance, image_meta, [ 1654.156351] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1654.156351] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1654.156351] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1654.156351] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] self._fetch_image_if_missing(context, vi) [ 1654.156351] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1654.156351] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] image_cache(vi, tmp_image_ds_loc) [ 1654.156351] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1654.156768] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] vm_util.copy_virtual_disk( [ 1654.156768] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1654.156768] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] session._wait_for_task(vmdk_copy_task) [ 1654.156768] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1654.156768] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] return self.wait_for_task(task_ref) [ 1654.156768] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1654.156768] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] return evt.wait() [ 1654.156768] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1654.156768] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] result = hub.switch() [ 1654.156768] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1654.156768] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] return self.greenlet.switch() [ 1654.156768] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1654.156768] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] self.f(*self.args, **self.kw) [ 1654.157145] 
env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1654.157145] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] raise exceptions.translate_fault(task_info.error) [ 1654.157145] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1654.157145] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Faults: ['InvalidArgument'] [ 1654.157145] env[69784]: ERROR nova.compute.manager [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] [ 1654.157145] env[69784]: DEBUG nova.compute.utils [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1654.158740] env[69784]: DEBUG nova.compute.manager [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Build of instance e6d05e25-386e-43d1-aec4-d62b9476891d was re-scheduled: A specified parameter was not correct: fileType [ 1654.158740] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1654.159144] env[69784]: DEBUG nova.compute.manager [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1654.159320] env[69784]: DEBUG nova.compute.manager [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1654.159487] env[69784]: DEBUG nova.compute.manager [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1654.159656] env[69784]: DEBUG nova.network.neutron [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1654.475027] env[69784]: DEBUG nova.network.neutron [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1654.487742] env[69784]: INFO nova.compute.manager [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Took 0.33 seconds to deallocate network for instance. [ 1654.583162] env[69784]: INFO nova.scheduler.client.report [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Deleted allocations for instance e6d05e25-386e-43d1-aec4-d62b9476891d [ 1654.605748] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b2969e95-5c7a-4f1b-94a4-d89badb36ba5 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Lock "e6d05e25-386e-43d1-aec4-d62b9476891d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 584.302s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1654.606971] env[69784]: DEBUG oslo_concurrency.lockutils [None req-163f49f9-e0d6-42a8-bf04-07264cead378 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Lock "e6d05e25-386e-43d1-aec4-d62b9476891d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 387.843s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1654.607197] env[69784]: DEBUG oslo_concurrency.lockutils [None req-163f49f9-e0d6-42a8-bf04-07264cead378 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Acquiring lock "e6d05e25-386e-43d1-aec4-d62b9476891d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1654.607424] env[69784]: DEBUG oslo_concurrency.lockutils [None req-163f49f9-e0d6-42a8-bf04-07264cead378 tempest-ServersNegativeTestMultiTenantJSON-1296261133 
tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Lock "e6d05e25-386e-43d1-aec4-d62b9476891d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1654.607732] env[69784]: DEBUG oslo_concurrency.lockutils [None req-163f49f9-e0d6-42a8-bf04-07264cead378 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Lock "e6d05e25-386e-43d1-aec4-d62b9476891d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1654.609729] env[69784]: INFO nova.compute.manager [None req-163f49f9-e0d6-42a8-bf04-07264cead378 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Terminating instance [ 1654.613678] env[69784]: DEBUG nova.compute.manager [None req-163f49f9-e0d6-42a8-bf04-07264cead378 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1654.614015] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-163f49f9-e0d6-42a8-bf04-07264cead378 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1654.614378] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-48a0cb31-f497-4c9f-b2ee-8c33b9e93b00 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.624096] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d327a76e-41d4-4089-a11d-07d4be69d318 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.642480] env[69784]: DEBUG nova.compute.manager [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1654.654244] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-163f49f9-e0d6-42a8-bf04-07264cead378 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e6d05e25-386e-43d1-aec4-d62b9476891d could not be found. 
[ 1654.654597] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-163f49f9-e0d6-42a8-bf04-07264cead378 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1654.654690] env[69784]: INFO nova.compute.manager [None req-163f49f9-e0d6-42a8-bf04-07264cead378 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1654.654938] env[69784]: DEBUG oslo.service.loopingcall [None req-163f49f9-e0d6-42a8-bf04-07264cead378 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1654.656273] env[69784]: DEBUG nova.compute.manager [-] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1654.656273] env[69784]: DEBUG nova.network.neutron [-] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1654.680994] env[69784]: DEBUG nova.network.neutron [-] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1654.688476] env[69784]: INFO nova.compute.manager [-] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] Took 0.03 seconds to deallocate network for instance. 
[ 1654.690747] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1654.690977] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1654.692364] env[69784]: INFO nova.compute.claims [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1654.782675] env[69784]: DEBUG oslo_concurrency.lockutils [None req-163f49f9-e0d6-42a8-bf04-07264cead378 tempest-ServersNegativeTestMultiTenantJSON-1296261133 tempest-ServersNegativeTestMultiTenantJSON-1296261133-project-member] Lock "e6d05e25-386e-43d1-aec4-d62b9476891d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.176s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1654.783976] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "e6d05e25-386e-43d1-aec4-d62b9476891d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 381.890s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1654.783976] env[69784]: INFO nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: e6d05e25-386e-43d1-aec4-d62b9476891d] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1654.783976] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "e6d05e25-386e-43d1-aec4-d62b9476891d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1654.931130] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9ff900f-25d3-4694-bf35-600a9747ff02 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.938219] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90bbf52a-e433-4269-ad59-db623e39373d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.975022] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6decb67d-be66-4e5d-b254-13ae746bbae4 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.982248] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9dfdbed-396a-4eba-a236-8903f2dacf30 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.994722] env[69784]: DEBUG nova.compute.provider_tree [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1655.003785] env[69784]: DEBUG nova.scheduler.client.report [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1655.016830] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.326s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1655.017327] env[69784]: DEBUG nova.compute.manager [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Start building networks asynchronously for instance. 
{{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1655.051074] env[69784]: DEBUG nova.compute.utils [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1655.052283] env[69784]: DEBUG nova.compute.manager [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1655.052454] env[69784]: DEBUG nova.network.neutron [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1655.061154] env[69784]: DEBUG nova.compute.manager [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1655.122207] env[69784]: DEBUG nova.compute.manager [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Start spawning the instance on the hypervisor. 
{{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1655.135138] env[69784]: DEBUG nova.policy [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fd3000955aac4413b0343029bb134280', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b532e382791a418081b96b564cdc6100', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 1655.152589] env[69784]: DEBUG nova.virt.hardware [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1655.152829] env[69784]: DEBUG nova.virt.hardware [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1655.152986] env[69784]: DEBUG nova.virt.hardware [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1655.153185] env[69784]: DEBUG nova.virt.hardware [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1655.153331] env[69784]: DEBUG nova.virt.hardware [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1655.153476] env[69784]: DEBUG nova.virt.hardware [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1655.153682] 
env[69784]: DEBUG nova.virt.hardware [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1655.153841] env[69784]: DEBUG nova.virt.hardware [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1655.154019] env[69784]: DEBUG nova.virt.hardware [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1655.154189] env[69784]: DEBUG nova.virt.hardware [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1655.154359] env[69784]: DEBUG nova.virt.hardware [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1655.155219] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ae1ca1c-0853-415e-b2e9-815c9cfbe0bd {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.162818] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32b4049e-8616-47cc-8309-c6c2c2303223 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.433643] env[69784]: DEBUG nova.network.neutron [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Successfully created port: 2625c2f9-0e9f-437e-bbc4-0d1143a77f9a {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1656.053600] env[69784]: DEBUG nova.network.neutron [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Successfully updated port: 2625c2f9-0e9f-437e-bbc4-0d1143a77f9a {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1656.066683] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Acquiring lock "refresh_cache-0d0e7d04-847c-486f-8cb1-b2b3afe33f2d" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1656.067071] env[69784]: DEBUG oslo_concurrency.lockutils 
[None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Acquired lock "refresh_cache-0d0e7d04-847c-486f-8cb1-b2b3afe33f2d" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1656.067205] env[69784]: DEBUG nova.network.neutron [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1656.114081] env[69784]: DEBUG nova.network.neutron [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Instance cache missing network info. {{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1656.285650] env[69784]: DEBUG nova.network.neutron [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Updating instance_info_cache with network_info: [{"id": "2625c2f9-0e9f-437e-bbc4-0d1143a77f9a", "address": "fa:16:3e:81:18:c2", "network": {"id": "859dfdd9-b780-4d6b-ac6f-16003fab4582", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-294350676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b532e382791a418081b96b564cdc6100", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6076d24d-3c8e-4bbb-ba96-a08fb27a73cc", "external-id": "nsx-vlan-transportzone-267", "segmentation_id": 267, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2625c2f9-0e", "ovs_interfaceid": "2625c2f9-0e9f-437e-bbc4-0d1143a77f9a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1656.297998] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Releasing lock "refresh_cache-0d0e7d04-847c-486f-8cb1-b2b3afe33f2d" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1656.298299] env[69784]: DEBUG nova.compute.manager [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Instance network_info: |[{"id": "2625c2f9-0e9f-437e-bbc4-0d1143a77f9a", "address": "fa:16:3e:81:18:c2", "network": {"id": "859dfdd9-b780-4d6b-ac6f-16003fab4582", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-294350676-network", "subnets": 
[{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b532e382791a418081b96b564cdc6100", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6076d24d-3c8e-4bbb-ba96-a08fb27a73cc", "external-id": "nsx-vlan-transportzone-267", "segmentation_id": 267, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2625c2f9-0e", "ovs_interfaceid": "2625c2f9-0e9f-437e-bbc4-0d1143a77f9a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1656.298686] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:81:18:c2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6076d24d-3c8e-4bbb-ba96-a08fb27a73cc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2625c2f9-0e9f-437e-bbc4-0d1143a77f9a', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1656.305926] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Creating folder: Project (b532e382791a418081b96b564cdc6100). Parent ref: group-v692547. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1656.306465] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9f580ed6-0020-41b1-af87-3926316f4918 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.316664] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Created folder: Project (b532e382791a418081b96b564cdc6100) in parent group-v692547. [ 1656.316843] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Creating folder: Instances. Parent ref: group-v692638. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1656.317071] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4fe0ea1a-eaa0-439f-9b87-7603e621631d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.326087] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Created folder: Instances in parent group-v692638. 
[ 1656.326308] env[69784]: DEBUG oslo.service.loopingcall [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1656.326483] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1656.326663] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-72e49934-a63d-4bae-9e31-ca27d9fc0a13 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.344714] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1656.344714] env[69784]: value = "task-3467169" [ 1656.344714] env[69784]: _type = "Task" [ 1656.344714] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1656.351983] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467169, 'name': CreateVM_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.543496] env[69784]: DEBUG nova.compute.manager [req-6a32a49d-caf0-4641-a8f8-56c1a3979865 req-ad840ebb-883d-4b07-9409-9c67e1d731ce service nova] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Received event network-vif-plugged-2625c2f9-0e9f-437e-bbc4-0d1143a77f9a {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1656.543743] env[69784]: DEBUG oslo_concurrency.lockutils [req-6a32a49d-caf0-4641-a8f8-56c1a3979865 req-ad840ebb-883d-4b07-9409-9c67e1d731ce service nova] Acquiring lock "0d0e7d04-847c-486f-8cb1-b2b3afe33f2d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1656.543953] env[69784]: DEBUG oslo_concurrency.lockutils [req-6a32a49d-caf0-4641-a8f8-56c1a3979865 req-ad840ebb-883d-4b07-9409-9c67e1d731ce service nova] Lock "0d0e7d04-847c-486f-8cb1-b2b3afe33f2d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1656.544255] env[69784]: DEBUG oslo_concurrency.lockutils [req-6a32a49d-caf0-4641-a8f8-56c1a3979865 req-ad840ebb-883d-4b07-9409-9c67e1d731ce service nova] Lock "0d0e7d04-847c-486f-8cb1-b2b3afe33f2d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1656.544434] env[69784]: DEBUG nova.compute.manager [req-6a32a49d-caf0-4641-a8f8-56c1a3979865 req-ad840ebb-883d-4b07-9409-9c67e1d731ce service nova] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] No waiting events found dispatching network-vif-plugged-2625c2f9-0e9f-437e-bbc4-0d1143a77f9a {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1656.544599] env[69784]: WARNING nova.compute.manager [req-6a32a49d-caf0-4641-a8f8-56c1a3979865 req-ad840ebb-883d-4b07-9409-9c67e1d731ce service nova] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] 
Received unexpected event network-vif-plugged-2625c2f9-0e9f-437e-bbc4-0d1143a77f9a for instance with vm_state building and task_state spawning. [ 1656.544758] env[69784]: DEBUG nova.compute.manager [req-6a32a49d-caf0-4641-a8f8-56c1a3979865 req-ad840ebb-883d-4b07-9409-9c67e1d731ce service nova] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Received event network-changed-2625c2f9-0e9f-437e-bbc4-0d1143a77f9a {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1656.544917] env[69784]: DEBUG nova.compute.manager [req-6a32a49d-caf0-4641-a8f8-56c1a3979865 req-ad840ebb-883d-4b07-9409-9c67e1d731ce service nova] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Refreshing instance network info cache due to event network-changed-2625c2f9-0e9f-437e-bbc4-0d1143a77f9a. {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1656.545164] env[69784]: DEBUG oslo_concurrency.lockutils [req-6a32a49d-caf0-4641-a8f8-56c1a3979865 req-ad840ebb-883d-4b07-9409-9c67e1d731ce service nova] Acquiring lock "refresh_cache-0d0e7d04-847c-486f-8cb1-b2b3afe33f2d" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1656.545314] env[69784]: DEBUG oslo_concurrency.lockutils [req-6a32a49d-caf0-4641-a8f8-56c1a3979865 req-ad840ebb-883d-4b07-9409-9c67e1d731ce service nova] Acquired lock "refresh_cache-0d0e7d04-847c-486f-8cb1-b2b3afe33f2d" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1656.545472] env[69784]: DEBUG nova.network.neutron [req-6a32a49d-caf0-4641-a8f8-56c1a3979865 req-ad840ebb-883d-4b07-9409-9c67e1d731ce service nova] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Refreshing network info cache for port 2625c2f9-0e9f-437e-bbc4-0d1143a77f9a {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1656.784349] env[69784]: DEBUG nova.network.neutron [req-6a32a49d-caf0-4641-a8f8-56c1a3979865 req-ad840ebb-883d-4b07-9409-9c67e1d731ce service nova] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Updated VIF entry in instance network info cache for port 2625c2f9-0e9f-437e-bbc4-0d1143a77f9a. 
{{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1656.784679] env[69784]: DEBUG nova.network.neutron [req-6a32a49d-caf0-4641-a8f8-56c1a3979865 req-ad840ebb-883d-4b07-9409-9c67e1d731ce service nova] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Updating instance_info_cache with network_info: [{"id": "2625c2f9-0e9f-437e-bbc4-0d1143a77f9a", "address": "fa:16:3e:81:18:c2", "network": {"id": "859dfdd9-b780-4d6b-ac6f-16003fab4582", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-294350676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b532e382791a418081b96b564cdc6100", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6076d24d-3c8e-4bbb-ba96-a08fb27a73cc", "external-id": "nsx-vlan-transportzone-267", "segmentation_id": 267, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2625c2f9-0e", "ovs_interfaceid": "2625c2f9-0e9f-437e-bbc4-0d1143a77f9a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1656.793566] env[69784]: DEBUG oslo_concurrency.lockutils [req-6a32a49d-caf0-4641-a8f8-56c1a3979865 req-ad840ebb-883d-4b07-9409-9c67e1d731ce service nova] Releasing lock "refresh_cache-0d0e7d04-847c-486f-8cb1-b2b3afe33f2d" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1656.856221] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467169, 'name': CreateVM_Task, 'duration_secs': 0.289218} completed successfully. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1656.858026] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1656.858026] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1656.858168] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1656.858495] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1656.858749] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbabe407-b3b2-4061-be52-7bb6d216fdd0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.863476] env[69784]: DEBUG oslo_vmware.api [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Waiting for the task: (returnval){ [ 1656.863476] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52c76b90-0bbc-ce3c-7ad5-50596ebaa7c0" [ 1656.863476] env[69784]: _type = "Task" [ 1656.863476] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1656.870983] env[69784]: DEBUG oslo_vmware.api [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52c76b90-0bbc-ce3c-7ad5-50596ebaa7c0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.373990] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1657.374306] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1657.374472] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1663.863623] env[69784]: DEBUG oslo_concurrency.lockutils [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Acquiring lock "ae4e1119-10e5-42fe-bb57-6bcb2c54d90b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1663.863990] env[69784]: DEBUG oslo_concurrency.lockutils [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Lock "ae4e1119-10e5-42fe-bb57-6bcb2c54d90b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1666.501090] env[69784]: DEBUG oslo_concurrency.lockutils [None req-86682d25-7dcb-4281-bf98-4fcc3fd00c62 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Acquiring lock "db704361-31ad-49a0-8aa7-01d4e3f42a3d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1683.918992] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8cd5d185-2b46-47a4-8a01-bc2989ba2f8b tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Acquiring lock "a34a0620-ea85-4bd5-9690-c93d70ecb9ec" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1692.161288] env[69784]: DEBUG oslo_concurrency.lockutils [None req-598f0593-5cfa-45f9-8a56-f20950968e26 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Acquiring lock "0d0e7d04-847c-486f-8cb1-b2b3afe33f2d" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1697.927901] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f8142916-9abd-406a-96c3-b913ea7fc064 tempest-ServersTestMultiNic-248893282 tempest-ServersTestMultiNic-248893282-project-member] Acquiring lock "30939594-471e-4cee-a7f3-2fa62023f897" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1697.927901] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f8142916-9abd-406a-96c3-b913ea7fc064 tempest-ServersTestMultiNic-248893282 tempest-ServersTestMultiNic-248893282-project-member] Lock "30939594-471e-4cee-a7f3-2fa62023f897" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1700.840254] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1700.840553] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1700.840665] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69784) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1701.503445] env[69784]: WARNING oslo_vmware.rw_handles [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1701.503445] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1701.503445] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1701.503445] env[69784]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1701.503445] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1701.503445] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 1701.503445] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1701.503445] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1701.503445] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1701.503445] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1701.503445] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1701.503445] env[69784]: ERROR oslo_vmware.rw_handles [ 1701.503908] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/5bb425dd-2638-4186-b460-800f3bca2732/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1701.506517] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1701.506839] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Copying Virtual Disk [datastore1] vmware_temp/5bb425dd-2638-4186-b460-800f3bca2732/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] vmware_temp/5bb425dd-2638-4186-b460-800f3bca2732/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1701.507209] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-afe80855-a31b-452a-be4f-76f3dc820508 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.517949] env[69784]: DEBUG oslo_vmware.api [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Waiting for the task: (returnval){ [ 1701.517949] 
env[69784]: value = "task-3467170" [ 1701.517949] env[69784]: _type = "Task" [ 1701.517949] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1701.528357] env[69784]: DEBUG oslo_vmware.api [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Task: {'id': task-3467170, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.028098] env[69784]: DEBUG oslo_vmware.exceptions [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Fault InvalidArgument not matched. {{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1702.028469] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1702.028972] env[69784]: ERROR nova.compute.manager [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1702.028972] env[69784]: Faults: ['InvalidArgument'] [ 1702.028972] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Traceback (most recent call last): [ 1702.028972] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1702.028972] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] yield resources [ 1702.028972] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1702.028972] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] self.driver.spawn(context, instance, image_meta, [ 1702.028972] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1702.028972] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1702.028972] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1702.028972] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] self._fetch_image_if_missing(context, vi) [ 1702.028972] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1702.029368] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] image_cache(vi, tmp_image_ds_loc) [ 
1702.029368] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1702.029368] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] vm_util.copy_virtual_disk( [ 1702.029368] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1702.029368] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] session._wait_for_task(vmdk_copy_task) [ 1702.029368] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1702.029368] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] return self.wait_for_task(task_ref) [ 1702.029368] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1702.029368] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] return evt.wait() [ 1702.029368] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1702.029368] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] result = hub.switch() [ 1702.029368] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1702.029368] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] return self.greenlet.switch() [ 1702.029696] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1702.029696] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] self.f(*self.args, **self.kw) [ 1702.029696] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1702.029696] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] raise exceptions.translate_fault(task_info.error) [ 1702.029696] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1702.029696] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Faults: ['InvalidArgument'] [ 1702.029696] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] [ 1702.029696] env[69784]: INFO nova.compute.manager [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Terminating instance [ 1702.030859] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] 
Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1702.031077] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1702.031945] env[69784]: DEBUG nova.compute.manager [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1702.032152] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1702.032375] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e71c4e52-5fd3-45b3-ba5a-5c5544b98922 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.034573] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcbee88d-20b7-4eb0-9223-e910fd55c80f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.041243] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1702.041488] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-660f6848-12f2-43df-adf8-b2c722cc21eb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.043555] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1702.043728] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1702.044651] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-860b52fb-49d9-46ca-9dc9-befb0d6cb2af {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.049240] env[69784]: DEBUG oslo_vmware.api [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Waiting for the task: (returnval){ [ 1702.049240] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]5294479b-80cf-db5e-bfe5-809f7ba42967" [ 1702.049240] env[69784]: _type = "Task" [ 1702.049240] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.056525] env[69784]: DEBUG oslo_vmware.api [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]5294479b-80cf-db5e-bfe5-809f7ba42967, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.114011] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1702.114256] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1702.114426] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Deleting the datastore file [datastore1] 26793ea1-2934-4b30-8f8c-6beefe7046f7 {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1702.114684] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b7f4e0a1-5e80-4ba6-ae26-55dff1e3ca44 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.121160] env[69784]: DEBUG oslo_vmware.api [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Waiting for the task: (returnval){ [ 1702.121160] env[69784]: value = "task-3467172" [ 1702.121160] env[69784]: _type = "Task" [ 1702.121160] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.128678] env[69784]: DEBUG oslo_vmware.api [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Task: {'id': task-3467172, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.559757] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1702.560118] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Creating directory with path [datastore1] vmware_temp/f070442e-2e18-4cb2-989e-17eb3d94aae0/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1702.560381] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bb5e9f1f-9b96-4c42-925e-ff67a3118d39 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.572138] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Created directory with path [datastore1] vmware_temp/f070442e-2e18-4cb2-989e-17eb3d94aae0/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1702.572293] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Fetch image to [datastore1] vmware_temp/f070442e-2e18-4cb2-989e-17eb3d94aae0/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1702.572477] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/f070442e-2e18-4cb2-989e-17eb3d94aae0/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1702.573204] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4974bf3e-2dfa-4467-8d86-83535bbcf0ae {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.579532] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d78cd82f-f584-4c77-99f1-94c7796520ca {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.588203] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a564975-46a6-4067-bc11-b0ebacbc80ba {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.618452] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aaa7fcb-fddd-4043-bd1d-d5074a2f88c4 {{(pid=69784) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.626174] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e33812a1-68a0-4592-9503-8181db9c202f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.630534] env[69784]: DEBUG oslo_vmware.api [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Task: {'id': task-3467172, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.06501} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1702.631043] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1702.631266] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1702.631443] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1702.631616] env[69784]: INFO nova.compute.manager [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Took 0.60 seconds to destroy the instance on the hypervisor. 
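
The entries above show the image-cache preparation path: nova creates [datastore1] vmware_temp/f070442e-.../a83f2316-..., then oslo_vmware.rw_handles opens an HTTP write connection to the datastore's /folder endpoint and streams the 21318656-byte tmp-sparse.vmdk into it before closing the handle. Below is a minimal, hypothetical sketch of that upload mechanism using the requests library with placeholder credentials and a placeholder local file; the real code path goes through oslo_vmware.rw_handles and a SessionManager.AcquireGenericServiceTicket service ticket, so treat this only as an illustration of the /folder?dcPath=...&dsName=... upload interface seen in the URL above.

# Hypothetical sketch: stream a local VMDK to a vSphere datastore over the
# HTTP /folder file-access interface.  Host, path, credentials and the local
# file name are placeholders taken from / invented around the log above.
import requests

ESX_HOST = "esx7c1n3.openstack.eu-de-1.cloud.sap"
DATASTORE = "datastore1"
DC_PATH = "ha-datacenter"
REMOTE_PATH = ("vmware_temp/f070442e-2e18-4cb2-989e-17eb3d94aae0/"
               "a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk")

url = (f"https://{ESX_HOST}:443/folder/{REMOTE_PATH}"
       f"?dcPath={DC_PATH}&dsName={DATASTORE}")

with open("cirros-sparse.vmdk", "rb") as image:          # placeholder local image
    resp = requests.put(
        url,
        data=image,                                       # streamed from the file object
        auth=("root", "secret"),                          # placeholder credentials; nova uses a service ticket
        headers={"Content-Type": "application/octet-stream"},
        verify=False,                                     # lab environment with self-signed certs
    )
resp.raise_for_status()                                   # success means the file now exists on the datastore
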
[ 1702.633674] env[69784]: DEBUG nova.compute.claims [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1702.633873] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1702.634105] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1702.650230] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1702.699608] env[69784]: DEBUG oslo_vmware.rw_handles [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f070442e-2e18-4cb2-989e-17eb3d94aae0/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1702.762503] env[69784]: DEBUG oslo_vmware.rw_handles [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Completed reading data from the image iterator. {{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1702.762698] env[69784]: DEBUG oslo_vmware.rw_handles [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f070442e-2e18-4cb2-989e-17eb3d94aae0/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1702.839440] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1702.923847] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3a2cf10-058b-4a4d-9d9d-8a634a8dda1c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.931519] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7231da66-663b-4488-bb62-c626ee14db1b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.960632] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae96eb05-c76e-4e04-9e4c-0f3fb1bc54ae {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.967838] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb1419b-2548-4727-a2eb-af857ad930e0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.980512] env[69784]: DEBUG nova.compute.provider_tree [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1702.990072] env[69784]: DEBUG nova.scheduler.client.report [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1703.004292] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.370s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1703.004800] env[69784]: ERROR nova.compute.manager [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1703.004800] env[69784]: Faults: ['InvalidArgument'] [ 1703.004800] env[69784]: ERROR nova.compute.manager [instance: 
26793ea1-2934-4b30-8f8c-6beefe7046f7] Traceback (most recent call last): [ 1703.004800] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1703.004800] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] self.driver.spawn(context, instance, image_meta, [ 1703.004800] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1703.004800] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1703.004800] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1703.004800] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] self._fetch_image_if_missing(context, vi) [ 1703.004800] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1703.004800] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] image_cache(vi, tmp_image_ds_loc) [ 1703.004800] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1703.005212] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] vm_util.copy_virtual_disk( [ 1703.005212] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1703.005212] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] session._wait_for_task(vmdk_copy_task) [ 1703.005212] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1703.005212] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] return self.wait_for_task(task_ref) [ 1703.005212] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1703.005212] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] return evt.wait() [ 1703.005212] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1703.005212] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] result = hub.switch() [ 1703.005212] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1703.005212] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] return self.greenlet.switch() [ 1703.005212] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1703.005212] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] self.f(*self.args, **self.kw) [ 1703.005582] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1703.005582] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] raise exceptions.translate_fault(task_info.error) [ 1703.005582] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1703.005582] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Faults: ['InvalidArgument'] [ 1703.005582] env[69784]: ERROR nova.compute.manager [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] [ 1703.005582] env[69784]: DEBUG nova.compute.utils [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1703.006779] env[69784]: DEBUG nova.compute.manager [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Build of instance 26793ea1-2934-4b30-8f8c-6beefe7046f7 was re-scheduled: A specified parameter was not correct: fileType [ 1703.006779] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1703.007165] env[69784]: DEBUG nova.compute.manager [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1703.007335] env[69784]: DEBUG nova.compute.manager [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1703.007500] env[69784]: DEBUG nova.compute.manager [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1703.007661] env[69784]: DEBUG nova.network.neutron [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1703.373640] env[69784]: DEBUG nova.network.neutron [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1703.384097] env[69784]: INFO nova.compute.manager [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Took 0.38 seconds to deallocate network for instance. [ 1703.474039] env[69784]: INFO nova.scheduler.client.report [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Deleted allocations for instance 26793ea1-2934-4b30-8f8c-6beefe7046f7 [ 1703.501531] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1d8c6256-a9c5-407d-8aef-bc53ca70d423 tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Lock "26793ea1-2934-4b30-8f8c-6beefe7046f7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 613.810s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1703.502837] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "26793ea1-2934-4b30-8f8c-6beefe7046f7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 430.609s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1703.503106] env[69784]: INFO nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] During sync_power_state the instance has a pending task (spawning). Skip. 
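
The lock bookkeeping in the surrounding entries is oslo.concurrency at work: the failed build held the lock named after instance 26793ea1-... for 613.810 s, so the power-state sync (waited 430.609 s) and the terminate request that follows could only proceed once it was released. A minimal sketch of that per-instance serialization is below, assuming oslo.concurrency is installed; the function bodies are placeholders, not nova's actual build/terminate code.

# Sketch of per-instance serialization with oslo.concurrency lock primitives.
from oslo_concurrency import lockutils

INSTANCE_UUID = "26793ea1-2934-4b30-8f8c-6beefe7046f7"

@lockutils.synchronized(INSTANCE_UUID)
def locked_do_build_and_run_instance():
    # long-running spawn; every other operation keyed on this UUID queues here
    ...

def do_terminate_instance():
    # equivalent context-manager form: blocks until the build releases the lock
    with lockutils.lock(INSTANCE_UUID):
        ...
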
[ 1703.503341] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "26793ea1-2934-4b30-8f8c-6beefe7046f7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1703.504032] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b2b7726b-5e87-41e6-a4be-2893d3822dde tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Lock "26793ea1-2934-4b30-8f8c-6beefe7046f7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 417.115s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1703.504293] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b2b7726b-5e87-41e6-a4be-2893d3822dde tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Acquiring lock "26793ea1-2934-4b30-8f8c-6beefe7046f7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1703.504603] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b2b7726b-5e87-41e6-a4be-2893d3822dde tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Lock "26793ea1-2934-4b30-8f8c-6beefe7046f7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1703.504787] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b2b7726b-5e87-41e6-a4be-2893d3822dde tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Lock "26793ea1-2934-4b30-8f8c-6beefe7046f7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1703.508021] env[69784]: INFO nova.compute.manager [None req-b2b7726b-5e87-41e6-a4be-2893d3822dde tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Terminating instance [ 1703.509863] env[69784]: DEBUG nova.compute.manager [None req-b2b7726b-5e87-41e6-a4be-2893d3822dde tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Start destroying the instance on the hypervisor. 
{{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1703.510107] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b2b7726b-5e87-41e6-a4be-2893d3822dde tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1703.510372] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4a6171d3-815e-4145-8f05-33a7dcde1f0d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.515331] env[69784]: DEBUG nova.compute.manager [None req-2372b56a-d9df-4b7a-8f42-bda9cbfa2da0 tempest-ServersTestMultiNic-248893282 tempest-ServersTestMultiNic-248893282-project-member] [instance: 6d7f1207-ba9a-4d1c-9499-6c0677fd38ea] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1703.522042] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e4be315-01a6-4dc2-84e4-59aa738545d1 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.551423] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-b2b7726b-5e87-41e6-a4be-2893d3822dde tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 26793ea1-2934-4b30-8f8c-6beefe7046f7 could not be found. [ 1703.551692] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b2b7726b-5e87-41e6-a4be-2893d3822dde tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1703.551805] env[69784]: INFO nova.compute.manager [None req-b2b7726b-5e87-41e6-a4be-2893d3822dde tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1703.552052] env[69784]: DEBUG oslo.service.loopingcall [None req-b2b7726b-5e87-41e6-a4be-2893d3822dde tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1703.552464] env[69784]: DEBUG nova.compute.manager [None req-2372b56a-d9df-4b7a-8f42-bda9cbfa2da0 tempest-ServersTestMultiNic-248893282 tempest-ServersTestMultiNic-248893282-project-member] [instance: 6d7f1207-ba9a-4d1c-9499-6c0677fd38ea] Instance disappeared before build. 
{{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1703.553339] env[69784]: DEBUG nova.compute.manager [-] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1703.553448] env[69784]: DEBUG nova.network.neutron [-] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1703.580739] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2372b56a-d9df-4b7a-8f42-bda9cbfa2da0 tempest-ServersTestMultiNic-248893282 tempest-ServersTestMultiNic-248893282-project-member] Lock "6d7f1207-ba9a-4d1c-9499-6c0677fd38ea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.372s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1703.582528] env[69784]: DEBUG nova.network.neutron [-] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1703.589934] env[69784]: DEBUG nova.compute.manager [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1703.592692] env[69784]: INFO nova.compute.manager [-] [instance: 26793ea1-2934-4b30-8f8c-6beefe7046f7] Took 0.04 seconds to deallocate network for instance. [ 1703.648534] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1703.648788] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1703.650433] env[69784]: INFO nova.compute.claims [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1703.686751] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b2b7726b-5e87-41e6-a4be-2893d3822dde tempest-ServerGroupTestJSON-308585811 tempest-ServerGroupTestJSON-308585811-project-member] Lock "26793ea1-2934-4b30-8f8c-6beefe7046f7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.183s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1703.839876] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task 
ComputeManager._poll_rescued_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1703.887107] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-429bef00-a86e-46eb-9000-d3d3045a2d66 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.894523] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94301f2c-3305-4408-9910-2b4f1a5690bf {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.925166] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c1ca6ea-a921-4345-9af8-bbd6218f5394 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.932340] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cbc0bba-04c0-4359-9af2-09d418662808 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.946547] env[69784]: DEBUG nova.compute.provider_tree [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1703.955050] env[69784]: DEBUG nova.scheduler.client.report [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1703.968073] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.319s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1703.968550] env[69784]: DEBUG nova.compute.manager [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Start building networks asynchronously for instance. 
{{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1703.999277] env[69784]: DEBUG nova.compute.utils [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1704.000790] env[69784]: DEBUG nova.compute.manager [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1704.000878] env[69784]: DEBUG nova.network.neutron [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1704.009027] env[69784]: DEBUG nova.compute.manager [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1704.066748] env[69784]: DEBUG nova.policy [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5ca8a3265c314ddb8bcdb6cd3b5781a7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6fb8f53aa7bf4aba833d184b63d5faf5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 1704.076282] env[69784]: DEBUG nova.compute.manager [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Start spawning the instance on the hypervisor. 
{{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1704.107256] env[69784]: DEBUG nova.virt.hardware [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1704.107615] env[69784]: DEBUG nova.virt.hardware [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1704.107836] env[69784]: DEBUG nova.virt.hardware [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1704.108117] env[69784]: DEBUG nova.virt.hardware [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1704.108349] env[69784]: DEBUG nova.virt.hardware [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1704.108541] env[69784]: DEBUG nova.virt.hardware [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1704.108833] env[69784]: DEBUG nova.virt.hardware [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1704.109072] env[69784]: DEBUG nova.virt.hardware [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1704.109328] env[69784]: DEBUG nova.virt.hardware [None 
req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1704.109597] env[69784]: DEBUG nova.virt.hardware [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1704.109848] env[69784]: DEBUG nova.virt.hardware [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1704.111383] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1e2b543-c598-4eb2-b661-2c5a3723aa48 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.119647] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e08b94-18ba-44f9-8ae6-fba59e3662a8 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.379154] env[69784]: DEBUG nova.network.neutron [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Successfully created port: 98ba9232-35ba-4d22-8d41-ae88f0bf24f1 {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1705.260205] env[69784]: DEBUG nova.network.neutron [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Successfully updated port: 98ba9232-35ba-4d22-8d41-ae88f0bf24f1 {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1705.271775] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Acquiring lock "refresh_cache-ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1705.271937] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Acquired lock "refresh_cache-ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1705.272112] env[69784]: DEBUG nova.network.neutron [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1705.318095] env[69784]: DEBUG nova.network.neutron [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 
tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Instance cache missing network info. {{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1705.408713] env[69784]: DEBUG nova.compute.manager [req-8248d542-9404-42cd-9b68-208b87363159 req-450402ba-b1d8-4338-8038-04c47724516d service nova] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Received event network-vif-plugged-98ba9232-35ba-4d22-8d41-ae88f0bf24f1 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1705.409014] env[69784]: DEBUG oslo_concurrency.lockutils [req-8248d542-9404-42cd-9b68-208b87363159 req-450402ba-b1d8-4338-8038-04c47724516d service nova] Acquiring lock "ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1705.409149] env[69784]: DEBUG oslo_concurrency.lockutils [req-8248d542-9404-42cd-9b68-208b87363159 req-450402ba-b1d8-4338-8038-04c47724516d service nova] Lock "ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1705.409346] env[69784]: DEBUG oslo_concurrency.lockutils [req-8248d542-9404-42cd-9b68-208b87363159 req-450402ba-b1d8-4338-8038-04c47724516d service nova] Lock "ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1705.409468] env[69784]: DEBUG nova.compute.manager [req-8248d542-9404-42cd-9b68-208b87363159 req-450402ba-b1d8-4338-8038-04c47724516d service nova] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] No waiting events found dispatching network-vif-plugged-98ba9232-35ba-4d22-8d41-ae88f0bf24f1 {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1705.409632] env[69784]: WARNING nova.compute.manager [req-8248d542-9404-42cd-9b68-208b87363159 req-450402ba-b1d8-4338-8038-04c47724516d service nova] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Received unexpected event network-vif-plugged-98ba9232-35ba-4d22-8d41-ae88f0bf24f1 for instance with vm_state building and task_state spawning. [ 1705.409792] env[69784]: DEBUG nova.compute.manager [req-8248d542-9404-42cd-9b68-208b87363159 req-450402ba-b1d8-4338-8038-04c47724516d service nova] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Received event network-changed-98ba9232-35ba-4d22-8d41-ae88f0bf24f1 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1705.409943] env[69784]: DEBUG nova.compute.manager [req-8248d542-9404-42cd-9b68-208b87363159 req-450402ba-b1d8-4338-8038-04c47724516d service nova] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Refreshing instance network info cache due to event network-changed-98ba9232-35ba-4d22-8d41-ae88f0bf24f1. 
{{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1705.410843] env[69784]: DEBUG oslo_concurrency.lockutils [req-8248d542-9404-42cd-9b68-208b87363159 req-450402ba-b1d8-4338-8038-04c47724516d service nova] Acquiring lock "refresh_cache-ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1705.473322] env[69784]: DEBUG nova.network.neutron [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Updating instance_info_cache with network_info: [{"id": "98ba9232-35ba-4d22-8d41-ae88f0bf24f1", "address": "fa:16:3e:59:5a:d5", "network": {"id": "17c4aff7-c0bb-47ef-8da4-2cf89f3363d2", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-908452625-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6fb8f53aa7bf4aba833d184b63d5faf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98ba9232-35", "ovs_interfaceid": "98ba9232-35ba-4d22-8d41-ae88f0bf24f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1705.485986] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Releasing lock "refresh_cache-ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1705.486347] env[69784]: DEBUG nova.compute.manager [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Instance network_info: |[{"id": "98ba9232-35ba-4d22-8d41-ae88f0bf24f1", "address": "fa:16:3e:59:5a:d5", "network": {"id": "17c4aff7-c0bb-47ef-8da4-2cf89f3363d2", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-908452625-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6fb8f53aa7bf4aba833d184b63d5faf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tap98ba9232-35", "ovs_interfaceid": "98ba9232-35ba-4d22-8d41-ae88f0bf24f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1705.486657] env[69784]: DEBUG oslo_concurrency.lockutils [req-8248d542-9404-42cd-9b68-208b87363159 req-450402ba-b1d8-4338-8038-04c47724516d service nova] Acquired lock "refresh_cache-ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1705.486840] env[69784]: DEBUG nova.network.neutron [req-8248d542-9404-42cd-9b68-208b87363159 req-450402ba-b1d8-4338-8038-04c47724516d service nova] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Refreshing network info cache for port 98ba9232-35ba-4d22-8d41-ae88f0bf24f1 {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1705.488305] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:59:5a:d5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e238ac23-819b-452f-9015-52922e45efd3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '98ba9232-35ba-4d22-8d41-ae88f0bf24f1', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1705.495067] env[69784]: DEBUG oslo.service.loopingcall [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1705.497764] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1705.498223] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b1651928-048a-4b93-9803-c515f1f50095 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.517963] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1705.517963] env[69784]: value = "task-3467173" [ 1705.517963] env[69784]: _type = "Task" [ 1705.517963] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.528435] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467173, 'name': CreateVM_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.795157] env[69784]: DEBUG nova.network.neutron [req-8248d542-9404-42cd-9b68-208b87363159 req-450402ba-b1d8-4338-8038-04c47724516d service nova] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Updated VIF entry in instance network info cache for port 98ba9232-35ba-4d22-8d41-ae88f0bf24f1. 
{{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1705.795594] env[69784]: DEBUG nova.network.neutron [req-8248d542-9404-42cd-9b68-208b87363159 req-450402ba-b1d8-4338-8038-04c47724516d service nova] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Updating instance_info_cache with network_info: [{"id": "98ba9232-35ba-4d22-8d41-ae88f0bf24f1", "address": "fa:16:3e:59:5a:d5", "network": {"id": "17c4aff7-c0bb-47ef-8da4-2cf89f3363d2", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-908452625-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6fb8f53aa7bf4aba833d184b63d5faf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98ba9232-35", "ovs_interfaceid": "98ba9232-35ba-4d22-8d41-ae88f0bf24f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1705.805696] env[69784]: DEBUG oslo_concurrency.lockutils [req-8248d542-9404-42cd-9b68-208b87363159 req-450402ba-b1d8-4338-8038-04c47724516d service nova] Releasing lock "refresh_cache-ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1706.027414] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467173, 'name': CreateVM_Task, 'duration_secs': 0.297381} completed successfully. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.027587] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1706.028239] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1706.028435] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1706.028764] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1706.029015] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84495685-2e68-41c8-92a1-83dcab4e19b9 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.033382] env[69784]: DEBUG oslo_vmware.api [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Waiting for the task: (returnval){ [ 1706.033382] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]5208f56b-08db-d680-b1c1-96c979c68681" [ 1706.033382] env[69784]: _type = "Task" [ 1706.033382] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.040647] env[69784]: DEBUG oslo_vmware.api [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]5208f56b-08db-d680-b1c1-96c979c68681, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.543141] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1706.543451] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1706.543609] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1706.842144] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1707.840415] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1707.840811] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Starting heal instance info cache {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1707.840811] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Rebuilding the list of instances to heal {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1707.866198] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1707.866390] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1707.866516] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Skipping network cache update for instance because it is Building. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1707.866647] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1707.866768] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1707.866886] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1707.867014] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1707.867145] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1707.867265] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1707.867379] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1707.867508] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Didn't find any instances for network info cache update. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1707.867985] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1707.885088] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1707.885409] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1707.885462] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1707.885618] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69784) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1707.886781] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87f05158-0aa1-4803-ad20-7aae6275cec7 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.895075] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be9b5759-5924-41c5-a261-811b2c169fde {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.909160] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f29fdb87-4936-41ce-9794-70b812e33104 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.915721] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee084789-94cd-4046-b527-26af00ba255d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.947100] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180948MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=69784) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1707.947100] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 
1707.947324] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1708.055077] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 91546cc5-6e8b-4175-b256-ba19e98c22cc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1708.055246] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 1473585c-f194-4396-b568-e8c1bc6d049b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1708.055377] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 697cd7aa-d710-4e46-b241-085961a8631d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1708.055500] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 161991fb-77d5-4a18-b0f3-d2346c8d3b68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1708.055620] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1708.055739] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 93ea7e73-f280-4e22-9ac7-f1be9926a158 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1708.055856] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance db704361-31ad-49a0-8aa7-01d4e3f42a3d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1708.055971] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance a34a0620-ea85-4bd5-9690-c93d70ecb9ec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1708.056097] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1708.056213] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1708.068704] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 75542f72-2ff8-44c7-90f2-b33c3391148e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1708.080624] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 85bc1bfa-46dd-4344-8fe5-a035a9574d02 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1708.091665] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 7632e563-1790-442f-9e13-77f3d93e4223 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1708.123257] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 871e3b73-d4d8-4081-8b92-0dee212d8961 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1708.134214] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ae4e1119-10e5-42fe-bb57-6bcb2c54d90b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1708.144649] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 30939594-471e-4cee-a7f3-2fa62023f897 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1708.144649] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1708.144649] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1708.338089] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e1d401c-a5e2-4d94-8911-331c9ad21957 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.345402] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18cfbcba-09ac-4a75-841c-206dfab37de9 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.375112] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7864b22f-1107-4ded-a028-1f4b9662bcc0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.381850] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d5697f3-574a-4edf-b625-cf68cf74992b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.394267] env[69784]: DEBUG nova.compute.provider_tree [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1708.403336] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1708.421898] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69784) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1708.422043] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.475s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1710.416573] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1711.839643] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1713.834966] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1721.674959] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4a8dff9e-57c7-44e1-9aa3-79f65c61d92a tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Acquiring lock "ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1734.980119] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Acquiring lock "4479a824-1f93-45d0-953f-57736580d86f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1734.980420] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Lock "4479a824-1f93-45d0-953f-57736580d86f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1750.734582] env[69784]: WARNING oslo_vmware.rw_handles [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1750.734582] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1750.734582] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1750.734582] env[69784]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1750.734582] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1750.734582] env[69784]: ERROR oslo_vmware.rw_handles 
response.begin() [ 1750.734582] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1750.734582] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1750.734582] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1750.734582] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1750.734582] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1750.734582] env[69784]: ERROR oslo_vmware.rw_handles [ 1750.735238] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/f070442e-2e18-4cb2-989e-17eb3d94aae0/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1750.737187] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1750.737435] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Copying Virtual Disk [datastore1] vmware_temp/f070442e-2e18-4cb2-989e-17eb3d94aae0/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] vmware_temp/f070442e-2e18-4cb2-989e-17eb3d94aae0/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1750.737741] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c64eccd6-0e81-418b-92ca-e34b74b53750 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.745956] env[69784]: DEBUG oslo_vmware.api [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Waiting for the task: (returnval){ [ 1750.745956] env[69784]: value = "task-3467174" [ 1750.745956] env[69784]: _type = "Task" [ 1750.745956] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1750.753608] env[69784]: DEBUG oslo_vmware.api [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Task: {'id': task-3467174, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.257361] env[69784]: DEBUG oslo_vmware.exceptions [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Fault InvalidArgument not matched. 
{{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1751.257717] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1751.258292] env[69784]: ERROR nova.compute.manager [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1751.258292] env[69784]: Faults: ['InvalidArgument'] [ 1751.258292] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Traceback (most recent call last): [ 1751.258292] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1751.258292] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] yield resources [ 1751.258292] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1751.258292] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] self.driver.spawn(context, instance, image_meta, [ 1751.258292] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1751.258292] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1751.258292] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1751.258292] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] self._fetch_image_if_missing(context, vi) [ 1751.258292] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1751.258621] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] image_cache(vi, tmp_image_ds_loc) [ 1751.258621] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1751.258621] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] vm_util.copy_virtual_disk( [ 1751.258621] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1751.258621] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] session._wait_for_task(vmdk_copy_task) [ 1751.258621] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1751.258621] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] return self.wait_for_task(task_ref) [ 1751.258621] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1751.258621] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] return evt.wait() [ 1751.258621] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1751.258621] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] result = hub.switch() [ 1751.258621] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1751.258621] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] return self.greenlet.switch() [ 1751.259012] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1751.259012] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] self.f(*self.args, **self.kw) [ 1751.259012] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1751.259012] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] raise exceptions.translate_fault(task_info.error) [ 1751.259012] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1751.259012] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Faults: ['InvalidArgument'] [ 1751.259012] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] [ 1751.259012] env[69784]: INFO nova.compute.manager [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Terminating instance [ 1751.260422] env[69784]: DEBUG oslo_concurrency.lockutils [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1751.260422] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1751.260620] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b282fdfc-c6d4-4db4-9dc1-3c488d7a6b07 {{(pid=69784) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.262855] env[69784]: DEBUG nova.compute.manager [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1751.263056] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1751.263784] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b727b483-2ec2-4249-8334-9bfd32cf8c48 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.270836] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1751.270970] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-db1d3b4c-fafe-4158-b30b-6c167ca2b75d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.273214] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1751.273385] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1751.274334] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38bd5ca5-95cc-4d69-b9fb-ce126f1d57d3 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.279071] env[69784]: DEBUG oslo_vmware.api [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Waiting for the task: (returnval){ [ 1751.279071] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]522d6e01-2d93-5724-45b1-c49bbdf6ca89" [ 1751.279071] env[69784]: _type = "Task" [ 1751.279071] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1751.286177] env[69784]: DEBUG oslo_vmware.api [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]522d6e01-2d93-5724-45b1-c49bbdf6ca89, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.344930] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1751.345168] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1751.345356] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Deleting the datastore file [datastore1] 91546cc5-6e8b-4175-b256-ba19e98c22cc {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1751.345624] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b23b425f-b935-46be-aa33-9ea7518c21b3 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.352177] env[69784]: DEBUG oslo_vmware.api [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Waiting for the task: (returnval){ [ 1751.352177] env[69784]: value = "task-3467176" [ 1751.352177] env[69784]: _type = "Task" [ 1751.352177] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1751.360539] env[69784]: DEBUG oslo_vmware.api [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Task: {'id': task-3467176, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.789992] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1751.789992] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Creating directory with path [datastore1] vmware_temp/c245accf-ad3e-4713-a2d2-94d6b6d041fa/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1751.790330] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3104f9dc-2a96-4ae1-a7ae-ab86db4e94fe {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.802497] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Created directory with path [datastore1] vmware_temp/c245accf-ad3e-4713-a2d2-94d6b6d041fa/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1751.802711] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Fetch image to [datastore1] vmware_temp/c245accf-ad3e-4713-a2d2-94d6b6d041fa/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1751.802883] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/c245accf-ad3e-4713-a2d2-94d6b6d041fa/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1751.803749] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc078b90-5de5-4d57-bcb5-263b2087621f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.810587] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4178c2df-6c2c-4523-aa9f-9680b59b4855 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.819485] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-527ea31c-f98e-4236-a61e-619c08aee1f5 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.850361] env[69784]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d955c6b-98af-431d-8a7d-9cc02cc942d2 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.861264] env[69784]: DEBUG oslo_vmware.api [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Task: {'id': task-3467176, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066452} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1751.862869] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1751.863035] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1751.863218] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1751.863417] env[69784]: INFO nova.compute.manager [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1751.865160] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-3662cad0-83ed-4ce3-a26b-73e37ed213a9 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.867153] env[69784]: DEBUG nova.compute.claims [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1751.867332] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1751.867554] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1751.891339] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1752.022214] env[69784]: DEBUG oslo_vmware.rw_handles [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c245accf-ad3e-4713-a2d2-94d6b6d041fa/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1752.088455] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquiring lock "ec925fe0-8f7b-46c0-8d61-6a9cf989e798" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1752.088657] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "ec925fe0-8f7b-46c0-8d61-6a9cf989e798" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1752.089197] env[69784]: DEBUG oslo_vmware.rw_handles [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Completed reading data from the image iterator. {{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1752.089263] env[69784]: DEBUG oslo_vmware.rw_handles [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c245accf-ad3e-4713-a2d2-94d6b6d041fa/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1752.180822] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dcf76a8-5aba-4cda-a043-718e420f83fc {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.189076] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d88d0a50-1971-4c8f-b8b7-1892d1347de2 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.218304] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a21c34f0-cd3a-4377-9052-59989025c15f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.225123] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cd9c145-5257-46d1-bd5a-80df43433cb2 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.237695] env[69784]: DEBUG nova.compute.provider_tree [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1752.247086] env[69784]: DEBUG nova.scheduler.client.report [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1752.260395] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.393s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1752.260894] env[69784]: ERROR nova.compute.manager [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1752.260894] env[69784]: Faults: ['InvalidArgument'] [ 1752.260894] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Traceback (most recent call last): [ 1752.260894] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1752.260894] env[69784]: ERROR 
nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] self.driver.spawn(context, instance, image_meta, [ 1752.260894] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1752.260894] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1752.260894] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1752.260894] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] self._fetch_image_if_missing(context, vi) [ 1752.260894] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1752.260894] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] image_cache(vi, tmp_image_ds_loc) [ 1752.260894] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1752.261254] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] vm_util.copy_virtual_disk( [ 1752.261254] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1752.261254] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] session._wait_for_task(vmdk_copy_task) [ 1752.261254] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1752.261254] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] return self.wait_for_task(task_ref) [ 1752.261254] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1752.261254] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] return evt.wait() [ 1752.261254] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1752.261254] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] result = hub.switch() [ 1752.261254] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1752.261254] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] return self.greenlet.switch() [ 1752.261254] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1752.261254] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] self.f(*self.args, **self.kw) [ 1752.261619] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1752.261619] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] raise exceptions.translate_fault(task_info.error) [ 1752.261619] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1752.261619] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Faults: ['InvalidArgument'] [ 1752.261619] env[69784]: ERROR nova.compute.manager [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] [ 1752.261619] env[69784]: DEBUG nova.compute.utils [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1752.262930] env[69784]: DEBUG nova.compute.manager [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Build of instance 91546cc5-6e8b-4175-b256-ba19e98c22cc was re-scheduled: A specified parameter was not correct: fileType [ 1752.262930] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1752.263352] env[69784]: DEBUG nova.compute.manager [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1752.263529] env[69784]: DEBUG nova.compute.manager [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1752.263695] env[69784]: DEBUG nova.compute.manager [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1752.263858] env[69784]: DEBUG nova.network.neutron [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1752.522456] env[69784]: DEBUG nova.network.neutron [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1752.532400] env[69784]: INFO nova.compute.manager [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Took 0.27 seconds to deallocate network for instance. [ 1752.638759] env[69784]: INFO nova.scheduler.client.report [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Deleted allocations for instance 91546cc5-6e8b-4175-b256-ba19e98c22cc [ 1752.659066] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a6d3a261-13f6-4eb2-843b-82752275b349 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Lock "91546cc5-6e8b-4175-b256-ba19e98c22cc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 620.931s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1752.660546] env[69784]: DEBUG oslo_concurrency.lockutils [None req-0903f6c3-7277-4e46-b454-a37be63e85be tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Lock "91546cc5-6e8b-4175-b256-ba19e98c22cc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 425.138s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1752.660766] env[69784]: DEBUG oslo_concurrency.lockutils [None req-0903f6c3-7277-4e46-b454-a37be63e85be tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Acquiring lock "91546cc5-6e8b-4175-b256-ba19e98c22cc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1752.660976] env[69784]: DEBUG oslo_concurrency.lockutils [None req-0903f6c3-7277-4e46-b454-a37be63e85be tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Lock "91546cc5-6e8b-4175-b256-ba19e98c22cc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69784) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1752.661157] env[69784]: DEBUG oslo_concurrency.lockutils [None req-0903f6c3-7277-4e46-b454-a37be63e85be tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Lock "91546cc5-6e8b-4175-b256-ba19e98c22cc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1752.663497] env[69784]: INFO nova.compute.manager [None req-0903f6c3-7277-4e46-b454-a37be63e85be tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Terminating instance [ 1752.665561] env[69784]: DEBUG nova.compute.manager [None req-0903f6c3-7277-4e46-b454-a37be63e85be tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1752.665757] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-0903f6c3-7277-4e46-b454-a37be63e85be tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1752.666253] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-53bf428b-f6fc-4465-a43d-78182440a0a9 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.677855] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b4626f5-a946-45b5-b1a0-7f914f7c46dd {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.690085] env[69784]: DEBUG nova.compute.manager [None req-538e4518-37dd-4dbb-a33c-a0d50d9c0918 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 75542f72-2ff8-44c7-90f2-b33c3391148e] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1752.712159] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-0903f6c3-7277-4e46-b454-a37be63e85be tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 91546cc5-6e8b-4175-b256-ba19e98c22cc could not be found. [ 1752.712254] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-0903f6c3-7277-4e46-b454-a37be63e85be tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1752.712425] env[69784]: INFO nova.compute.manager [None req-0903f6c3-7277-4e46-b454-a37be63e85be tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 1752.712678] env[69784]: DEBUG oslo.service.loopingcall [None req-0903f6c3-7277-4e46-b454-a37be63e85be tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1752.712983] env[69784]: DEBUG nova.compute.manager [-] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1752.713101] env[69784]: DEBUG nova.network.neutron [-] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1752.717378] env[69784]: DEBUG nova.compute.manager [None req-538e4518-37dd-4dbb-a33c-a0d50d9c0918 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 75542f72-2ff8-44c7-90f2-b33c3391148e] Instance disappeared before build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1752.740363] env[69784]: DEBUG oslo_concurrency.lockutils [None req-538e4518-37dd-4dbb-a33c-a0d50d9c0918 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Lock "75542f72-2ff8-44c7-90f2-b33c3391148e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 213.534s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1752.745027] env[69784]: DEBUG nova.network.neutron [-] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1752.755027] env[69784]: INFO nova.compute.manager [-] [instance: 91546cc5-6e8b-4175-b256-ba19e98c22cc] Took 0.04 seconds to deallocate network for instance. [ 1752.762927] env[69784]: DEBUG nova.compute.manager [None req-33150eef-d64f-406d-8ffc-49625c424391 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 85bc1bfa-46dd-4344-8fe5-a035a9574d02] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1752.787174] env[69784]: DEBUG nova.compute.manager [None req-33150eef-d64f-406d-8ffc-49625c424391 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: 85bc1bfa-46dd-4344-8fe5-a035a9574d02] Instance disappeared before build. 
{{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1752.806959] env[69784]: DEBUG oslo_concurrency.lockutils [None req-33150eef-d64f-406d-8ffc-49625c424391 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "85bc1bfa-46dd-4344-8fe5-a035a9574d02" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 197.063s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1752.816330] env[69784]: DEBUG nova.compute.manager [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1752.861993] env[69784]: DEBUG oslo_concurrency.lockutils [None req-0903f6c3-7277-4e46-b454-a37be63e85be tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Lock "91546cc5-6e8b-4175-b256-ba19e98c22cc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.201s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1752.870664] env[69784]: DEBUG oslo_concurrency.lockutils [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1752.870985] env[69784]: DEBUG oslo_concurrency.lockutils [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1752.872455] env[69784]: INFO nova.compute.claims [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1753.091589] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd39829e-fcb0-4013-b377-27dddce76daf {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.099296] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44304741-7314-40aa-8bb7-74cb6bc0493f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.130381] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c75a29a3-3acc-4253-bbf1-98f64cba025a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.136989] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc2994b0-21bd-480c-9afd-df10741c3052 {{(pid=69784) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.149473] env[69784]: DEBUG nova.compute.provider_tree [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1753.157328] env[69784]: DEBUG nova.scheduler.client.report [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1753.170743] env[69784]: DEBUG oslo_concurrency.lockutils [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.300s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1753.171227] env[69784]: DEBUG nova.compute.manager [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Start building networks asynchronously for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1753.205309] env[69784]: DEBUG nova.compute.utils [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1753.206935] env[69784]: DEBUG nova.compute.manager [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1753.209478] env[69784]: DEBUG nova.network.neutron [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1753.218974] env[69784]: DEBUG nova.compute.manager [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Start building block device mappings for instance. 
{{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1753.273238] env[69784]: DEBUG nova.policy [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f0bf6f0865f34a9293a9bcc6d6da958e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2104e9dcb91d4f8db613dd64af3b86c7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 1753.282940] env[69784]: DEBUG nova.compute.manager [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Start spawning the instance on the hypervisor. {{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1753.309088] env[69784]: DEBUG nova.virt.hardware [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1753.309350] env[69784]: DEBUG nova.virt.hardware [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1753.309511] env[69784]: DEBUG nova.virt.hardware [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1753.309688] env[69784]: DEBUG nova.virt.hardware [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1753.309858] env[69784]: DEBUG nova.virt.hardware [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1753.310034] env[69784]: DEBUG nova.virt.hardware [None 
req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1753.310252] env[69784]: DEBUG nova.virt.hardware [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1753.310413] env[69784]: DEBUG nova.virt.hardware [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1753.310583] env[69784]: DEBUG nova.virt.hardware [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1753.310746] env[69784]: DEBUG nova.virt.hardware [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1753.310924] env[69784]: DEBUG nova.virt.hardware [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1753.311782] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf560bee-51f5-4b6b-946b-fc9030716e3a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.319959] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bd020c6-3a97-4751-8c73-7e6c108ce1b6 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.577717] env[69784]: DEBUG nova.network.neutron [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Successfully created port: c00b3d43-6182-478f-a66a-b0be5257fd76 {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1754.330876] env[69784]: DEBUG nova.network.neutron [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Successfully updated port: c00b3d43-6182-478f-a66a-b0be5257fd76 {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1754.351466] env[69784]: DEBUG oslo_concurrency.lockutils [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 
tempest-ServersNegativeTestJSON-20042301-project-member] Acquiring lock "refresh_cache-7632e563-1790-442f-9e13-77f3d93e4223" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1754.351637] env[69784]: DEBUG oslo_concurrency.lockutils [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Acquired lock "refresh_cache-7632e563-1790-442f-9e13-77f3d93e4223" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1754.351795] env[69784]: DEBUG nova.network.neutron [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1754.401336] env[69784]: DEBUG nova.network.neutron [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Instance cache missing network info. {{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1754.557962] env[69784]: DEBUG nova.compute.manager [req-01c7d4cf-ff80-45bf-94c1-6e776dc8e74b req-129f41f7-4f21-4516-9378-56dd06fc16be service nova] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Received event network-vif-plugged-c00b3d43-6182-478f-a66a-b0be5257fd76 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1754.558637] env[69784]: DEBUG oslo_concurrency.lockutils [req-01c7d4cf-ff80-45bf-94c1-6e776dc8e74b req-129f41f7-4f21-4516-9378-56dd06fc16be service nova] Acquiring lock "7632e563-1790-442f-9e13-77f3d93e4223-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1754.558959] env[69784]: DEBUG oslo_concurrency.lockutils [req-01c7d4cf-ff80-45bf-94c1-6e776dc8e74b req-129f41f7-4f21-4516-9378-56dd06fc16be service nova] Lock "7632e563-1790-442f-9e13-77f3d93e4223-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1754.559166] env[69784]: DEBUG oslo_concurrency.lockutils [req-01c7d4cf-ff80-45bf-94c1-6e776dc8e74b req-129f41f7-4f21-4516-9378-56dd06fc16be service nova] Lock "7632e563-1790-442f-9e13-77f3d93e4223-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1754.559344] env[69784]: DEBUG nova.compute.manager [req-01c7d4cf-ff80-45bf-94c1-6e776dc8e74b req-129f41f7-4f21-4516-9378-56dd06fc16be service nova] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] No waiting events found dispatching network-vif-plugged-c00b3d43-6182-478f-a66a-b0be5257fd76 {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1754.559543] env[69784]: WARNING nova.compute.manager [req-01c7d4cf-ff80-45bf-94c1-6e776dc8e74b req-129f41f7-4f21-4516-9378-56dd06fc16be service nova] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Received unexpected event 
network-vif-plugged-c00b3d43-6182-478f-a66a-b0be5257fd76 for instance with vm_state building and task_state spawning. [ 1754.559712] env[69784]: DEBUG nova.compute.manager [req-01c7d4cf-ff80-45bf-94c1-6e776dc8e74b req-129f41f7-4f21-4516-9378-56dd06fc16be service nova] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Received event network-changed-c00b3d43-6182-478f-a66a-b0be5257fd76 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1754.560028] env[69784]: DEBUG nova.compute.manager [req-01c7d4cf-ff80-45bf-94c1-6e776dc8e74b req-129f41f7-4f21-4516-9378-56dd06fc16be service nova] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Refreshing instance network info cache due to event network-changed-c00b3d43-6182-478f-a66a-b0be5257fd76. {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1754.560153] env[69784]: DEBUG oslo_concurrency.lockutils [req-01c7d4cf-ff80-45bf-94c1-6e776dc8e74b req-129f41f7-4f21-4516-9378-56dd06fc16be service nova] Acquiring lock "refresh_cache-7632e563-1790-442f-9e13-77f3d93e4223" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1754.582160] env[69784]: DEBUG nova.network.neutron [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Updating instance_info_cache with network_info: [{"id": "c00b3d43-6182-478f-a66a-b0be5257fd76", "address": "fa:16:3e:96:6a:17", "network": {"id": "749e540a-7948-4f78-87b0-d5d8bac7f02b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1540190125-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2104e9dcb91d4f8db613dd64af3b86c7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3", "external-id": "nsx-vlan-transportzone-263", "segmentation_id": 263, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc00b3d43-61", "ovs_interfaceid": "c00b3d43-6182-478f-a66a-b0be5257fd76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1754.594036] env[69784]: DEBUG oslo_concurrency.lockutils [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Releasing lock "refresh_cache-7632e563-1790-442f-9e13-77f3d93e4223" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1754.594444] env[69784]: DEBUG nova.compute.manager [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Instance network_info: |[{"id": "c00b3d43-6182-478f-a66a-b0be5257fd76", "address": "fa:16:3e:96:6a:17", "network": {"id": 
"749e540a-7948-4f78-87b0-d5d8bac7f02b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1540190125-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2104e9dcb91d4f8db613dd64af3b86c7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3", "external-id": "nsx-vlan-transportzone-263", "segmentation_id": 263, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc00b3d43-61", "ovs_interfaceid": "c00b3d43-6182-478f-a66a-b0be5257fd76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1754.594778] env[69784]: DEBUG oslo_concurrency.lockutils [req-01c7d4cf-ff80-45bf-94c1-6e776dc8e74b req-129f41f7-4f21-4516-9378-56dd06fc16be service nova] Acquired lock "refresh_cache-7632e563-1790-442f-9e13-77f3d93e4223" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1754.594961] env[69784]: DEBUG nova.network.neutron [req-01c7d4cf-ff80-45bf-94c1-6e776dc8e74b req-129f41f7-4f21-4516-9378-56dd06fc16be service nova] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Refreshing network info cache for port c00b3d43-6182-478f-a66a-b0be5257fd76 {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1754.596310] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:96:6a:17', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c00b3d43-6182-478f-a66a-b0be5257fd76', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1754.604303] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Creating folder: Project (2104e9dcb91d4f8db613dd64af3b86c7). Parent ref: group-v692547. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1754.605559] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-230e3d4c-74c1-454f-b1f6-735f00b58aeb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.620600] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Created folder: Project (2104e9dcb91d4f8db613dd64af3b86c7) in parent group-v692547. 
[ 1754.620801] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Creating folder: Instances. Parent ref: group-v692642. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1754.621071] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c2a8fca3-b4ef-47ea-8b7b-5705fcb06a9f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.630577] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Created folder: Instances in parent group-v692642. [ 1754.630866] env[69784]: DEBUG oslo.service.loopingcall [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1754.631022] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1754.631249] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d6150868-a746-40a4-bcb3-715d1d50ba02 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.653170] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1754.653170] env[69784]: value = "task-3467179" [ 1754.653170] env[69784]: _type = "Task" [ 1754.653170] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.661161] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467179, 'name': CreateVM_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.882875] env[69784]: DEBUG nova.network.neutron [req-01c7d4cf-ff80-45bf-94c1-6e776dc8e74b req-129f41f7-4f21-4516-9378-56dd06fc16be service nova] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Updated VIF entry in instance network info cache for port c00b3d43-6182-478f-a66a-b0be5257fd76. 
{{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1754.883261] env[69784]: DEBUG nova.network.neutron [req-01c7d4cf-ff80-45bf-94c1-6e776dc8e74b req-129f41f7-4f21-4516-9378-56dd06fc16be service nova] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Updating instance_info_cache with network_info: [{"id": "c00b3d43-6182-478f-a66a-b0be5257fd76", "address": "fa:16:3e:96:6a:17", "network": {"id": "749e540a-7948-4f78-87b0-d5d8bac7f02b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1540190125-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2104e9dcb91d4f8db613dd64af3b86c7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3", "external-id": "nsx-vlan-transportzone-263", "segmentation_id": 263, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc00b3d43-61", "ovs_interfaceid": "c00b3d43-6182-478f-a66a-b0be5257fd76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1754.893996] env[69784]: DEBUG oslo_concurrency.lockutils [req-01c7d4cf-ff80-45bf-94c1-6e776dc8e74b req-129f41f7-4f21-4516-9378-56dd06fc16be service nova] Releasing lock "refresh_cache-7632e563-1790-442f-9e13-77f3d93e4223" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1755.162928] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467179, 'name': CreateVM_Task, 'duration_secs': 0.323602} completed successfully. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.163211] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1755.163858] env[69784]: DEBUG oslo_concurrency.lockutils [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1755.164046] env[69784]: DEBUG oslo_concurrency.lockutils [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1755.164372] env[69784]: DEBUG oslo_concurrency.lockutils [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1755.164613] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db74e578-5b81-4f21-beec-2de8bbd01fb3 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.169264] env[69784]: DEBUG oslo_vmware.api [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Waiting for the task: (returnval){ [ 1755.169264] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]520dd4ac-c6f1-aa30-3edf-b08c7d252882" [ 1755.169264] env[69784]: _type = "Task" [ 1755.169264] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.177035] env[69784]: DEBUG oslo_vmware.api [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]520dd4ac-c6f1-aa30-3edf-b08c7d252882, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.679498] env[69784]: DEBUG oslo_concurrency.lockutils [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1755.679867] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1755.680158] env[69784]: DEBUG oslo_concurrency.lockutils [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1760.841765] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1760.842080] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69784) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1762.840373] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1764.840651] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1765.839958] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1766.840255] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1767.840204] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1767.852800] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1767.853137] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1767.853187] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1767.853461] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69784) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1767.855048] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d93b82e-54c0-45f0-81a4-d2200fa3e81a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.863601] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53fabbde-2a9c-4c28-8f76-0bf923fb8f68 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.878269] env[69784]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d61f441-4914-4ead-b724-42d0620264a4 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.884627] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1b5a993-41f8-4bce-a921-5add50f38fb8 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.914175] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180938MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=69784) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1767.914328] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1767.914514] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1767.985385] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 1473585c-f194-4396-b568-e8c1bc6d049b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1767.985602] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 697cd7aa-d710-4e46-b241-085961a8631d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1767.985769] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 161991fb-77d5-4a18-b0f3-d2346c8d3b68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1767.985942] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1767.986111] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 93ea7e73-f280-4e22-9ac7-f1be9926a158 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1767.986245] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance db704361-31ad-49a0-8aa7-01d4e3f42a3d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1767.986367] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance a34a0620-ea85-4bd5-9690-c93d70ecb9ec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1767.986483] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1767.986599] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1767.986712] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 7632e563-1790-442f-9e13-77f3d93e4223 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1767.997932] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 871e3b73-d4d8-4081-8b92-0dee212d8961 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1768.007574] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ae4e1119-10e5-42fe-bb57-6bcb2c54d90b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1768.017339] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 30939594-471e-4cee-a7f3-2fa62023f897 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1768.026651] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 4479a824-1f93-45d0-953f-57736580d86f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1768.035651] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ec925fe0-8f7b-46c0-8d61-6a9cf989e798 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1768.035862] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1768.036026] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1768.204713] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7035c544-95fc-4de6-9d9b-923db363e5fe {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.212472] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e8fd65c-a330-4cb4-bec7-cedc5b66cdc3 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.242104] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0aeadf5-d553-40f8-a957-1125526b5654 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.248798] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f4207d-3c64-4ee3-a0d1-bda78f3af55e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.261543] env[69784]: DEBUG nova.compute.provider_tree [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1768.270781] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1768.284742] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69784) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1768.284922] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.370s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1769.285196] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1769.285540] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Starting heal instance info cache {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1769.285585] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Rebuilding the list of instances to heal {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1769.305245] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1769.305397] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1769.305530] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1769.305662] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1769.305786] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1769.305907] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Skipping network cache update for instance because it is Building. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1769.306455] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1769.306455] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1769.306455] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1769.306455] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1769.306658] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Didn't find any instances for network info cache update. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1771.857259] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1772.839229] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1798.645048] env[69784]: WARNING oslo_vmware.rw_handles [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1798.645048] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1798.645048] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1798.645048] env[69784]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1798.645048] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1798.645048] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 1798.645048] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1798.645048] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1798.645048] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1798.645048] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 
1798.645048] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1798.645048] env[69784]: ERROR oslo_vmware.rw_handles [ 1798.645048] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/c245accf-ad3e-4713-a2d2-94d6b6d041fa/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1798.646799] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1798.647075] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Copying Virtual Disk [datastore1] vmware_temp/c245accf-ad3e-4713-a2d2-94d6b6d041fa/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] vmware_temp/c245accf-ad3e-4713-a2d2-94d6b6d041fa/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1798.647403] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-da4f0efe-e30e-4163-9e25-262e871041e1 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.655597] env[69784]: DEBUG oslo_vmware.api [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Waiting for the task: (returnval){ [ 1798.655597] env[69784]: value = "task-3467180" [ 1798.655597] env[69784]: _type = "Task" [ 1798.655597] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.663335] env[69784]: DEBUG oslo_vmware.api [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Task: {'id': task-3467180, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.166176] env[69784]: DEBUG oslo_vmware.exceptions [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Fault InvalidArgument not matched. 
{{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1799.166486] env[69784]: DEBUG oslo_concurrency.lockutils [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1799.167108] env[69784]: ERROR nova.compute.manager [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1799.167108] env[69784]: Faults: ['InvalidArgument'] [ 1799.167108] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Traceback (most recent call last): [ 1799.167108] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1799.167108] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] yield resources [ 1799.167108] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1799.167108] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] self.driver.spawn(context, instance, image_meta, [ 1799.167108] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1799.167108] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1799.167108] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1799.167108] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] self._fetch_image_if_missing(context, vi) [ 1799.167108] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1799.167546] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] image_cache(vi, tmp_image_ds_loc) [ 1799.167546] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1799.167546] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] vm_util.copy_virtual_disk( [ 1799.167546] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1799.167546] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] session._wait_for_task(vmdk_copy_task) [ 1799.167546] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1799.167546] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] return self.wait_for_task(task_ref) [ 1799.167546] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1799.167546] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] return evt.wait() [ 1799.167546] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1799.167546] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] result = hub.switch() [ 1799.167546] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1799.167546] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] return self.greenlet.switch() [ 1799.167966] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1799.167966] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] self.f(*self.args, **self.kw) [ 1799.167966] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1799.167966] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] raise exceptions.translate_fault(task_info.error) [ 1799.167966] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1799.167966] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Faults: ['InvalidArgument'] [ 1799.167966] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] [ 1799.167966] env[69784]: INFO nova.compute.manager [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Terminating instance [ 1799.168977] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1799.169194] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1799.169457] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a74741cf-ec4b-443a-8808-db97e8b5b434 
{{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.171860] env[69784]: DEBUG nova.compute.manager [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1799.172067] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1799.172788] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb15d62b-38b3-4a8c-ae30-dc4a7e3a9c82 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.179362] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1799.179587] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1fedb204-d363-49f9-992f-09447613543a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.181761] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1799.181934] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1799.182881] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-550428bb-677a-4290-b54d-545aa00fceff {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.187527] env[69784]: DEBUG oslo_vmware.api [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Waiting for the task: (returnval){ [ 1799.187527] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52aa91d6-affd-120d-9edb-ed68c9ed8c76" [ 1799.187527] env[69784]: _type = "Task" [ 1799.187527] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.194888] env[69784]: DEBUG oslo_vmware.api [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52aa91d6-affd-120d-9edb-ed68c9ed8c76, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.246733] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1799.247018] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1799.247289] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Deleting the datastore file [datastore1] 1473585c-f194-4396-b568-e8c1bc6d049b {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1799.247580] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f079b0c8-46ad-4d8c-b3ec-212077b9e657 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.253092] env[69784]: DEBUG oslo_vmware.api [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Waiting for the task: (returnval){ [ 1799.253092] env[69784]: value = "task-3467182" [ 1799.253092] env[69784]: _type = "Task" [ 1799.253092] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.260404] env[69784]: DEBUG oslo_vmware.api [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Task: {'id': task-3467182, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.698403] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1799.698692] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Creating directory with path [datastore1] vmware_temp/76731665-cdfc-48ca-a939-ccf02d4ec9bc/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1799.699893] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-17364f19-ad80-4689-b673-2e92c6b3d6aa {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.710803] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Created directory with path [datastore1] vmware_temp/76731665-cdfc-48ca-a939-ccf02d4ec9bc/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1799.710897] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Fetch image to [datastore1] vmware_temp/76731665-cdfc-48ca-a939-ccf02d4ec9bc/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1799.711075] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/76731665-cdfc-48ca-a939-ccf02d4ec9bc/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1799.711805] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b975e9-c797-454c-9daf-c8d83b7cc4cf {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.719561] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5423236-bcc0-4044-9f84-3f4aa0b17857 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.728489] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11182efe-4dfe-4c12-8c3b-ab93bffa2334 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.762504] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-118462f5-1e38-4030-981d-9c3649f5306d {{(pid=69784) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.769351] env[69784]: DEBUG oslo_vmware.api [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Task: {'id': task-3467182, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075722} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.770852] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1799.771058] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1799.771239] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1799.771417] env[69784]: INFO nova.compute.manager [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Took 0.60 seconds to destroy the instance on the hypervisor. 
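The CopyVirtualDisk_Task and DeleteDatastoreFile_Task entries above all follow the same wait-for-task pattern: the vCenter API returns a task handle, Nova polls it, logs "progress is 0%" until the task finishes, and then either reads the result ("completed successfully" with a duration) or raises the translated fault, as in the InvalidArgument traceback earlier in this log. A minimal sketch of that polling loop, under the assumption of a hypothetical get_task_info callable and TaskFault class (these are illustrative stand-ins, not the oslo.vmware API):

import time

class TaskFault(Exception):
    """Stand-in for the translated VIM fault raised when a task errors out."""

def wait_for_task(get_task_info, interval=0.5, log=print):
    # Poll until vCenter reports a terminal state, mirroring the
    # "progress is 0%" ... "completed successfully" sequence logged above.
    while True:
        info = get_task_info()  # hypothetical helper returning a dict of task info
        if info['state'] == 'success':
            log("Task %s completed in %.6fs" % (info['id'], info.get('duration_secs', 0.0)))
            return info.get('result')
        if info['state'] == 'error':
            # The real code translates the fault first; here we simply wrap it.
            raise TaskFault(info.get('error', 'unknown fault'))
        log("Task %s progress is %s%%" % (info['id'], info.get('progress', 0)))
        time.sleep(interval)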
[ 1799.773191] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8bf53882-464c-4a99-8305-9958af67deee {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.775123] env[69784]: DEBUG nova.compute.claims [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1799.775303] env[69784]: DEBUG oslo_concurrency.lockutils [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1799.775522] env[69784]: DEBUG oslo_concurrency.lockutils [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1799.799043] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1799.846040] env[69784]: DEBUG oslo_vmware.rw_handles [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/76731665-cdfc-48ca-a939-ccf02d4ec9bc/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1799.907740] env[69784]: DEBUG oslo_vmware.rw_handles [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Completed reading data from the image iterator. {{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1799.907925] env[69784]: DEBUG oslo_vmware.rw_handles [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/76731665-cdfc-48ca-a939-ccf02d4ec9bc/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1800.046523] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efe4d5c5-f57e-467c-8224-f40a6ca2b03f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.054095] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-597e3b71-7bd8-47cd-a277-29e3858dd84c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.084195] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c1dfa23-83bf-4bac-87b4-887b0eb28cb2 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.090983] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e44e3093-487c-4c7b-a54a-4d3045118a29 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.103589] env[69784]: DEBUG nova.compute.provider_tree [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1800.111902] env[69784]: DEBUG nova.scheduler.client.report [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1800.126168] env[69784]: DEBUG oslo_concurrency.lockutils [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.351s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1800.126733] env[69784]: ERROR nova.compute.manager [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1800.126733] env[69784]: Faults: ['InvalidArgument'] [ 1800.126733] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Traceback (most recent call last): [ 1800.126733] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] File "/opt/stack/nova/nova/compute/manager.py", line 
2615, in _build_and_run_instance [ 1800.126733] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] self.driver.spawn(context, instance, image_meta, [ 1800.126733] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1800.126733] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1800.126733] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1800.126733] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] self._fetch_image_if_missing(context, vi) [ 1800.126733] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1800.126733] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] image_cache(vi, tmp_image_ds_loc) [ 1800.126733] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1800.127169] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] vm_util.copy_virtual_disk( [ 1800.127169] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1800.127169] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] session._wait_for_task(vmdk_copy_task) [ 1800.127169] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1800.127169] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] return self.wait_for_task(task_ref) [ 1800.127169] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1800.127169] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] return evt.wait() [ 1800.127169] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1800.127169] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] result = hub.switch() [ 1800.127169] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1800.127169] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] return self.greenlet.switch() [ 1800.127169] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1800.127169] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] self.f(*self.args, **self.kw) [ 1800.127896] env[69784]: ERROR nova.compute.manager [instance: 
1473585c-f194-4396-b568-e8c1bc6d049b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1800.127896] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] raise exceptions.translate_fault(task_info.error) [ 1800.127896] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1800.127896] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Faults: ['InvalidArgument'] [ 1800.127896] env[69784]: ERROR nova.compute.manager [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] [ 1800.127896] env[69784]: DEBUG nova.compute.utils [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1800.128884] env[69784]: DEBUG nova.compute.manager [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Build of instance 1473585c-f194-4396-b568-e8c1bc6d049b was re-scheduled: A specified parameter was not correct: fileType [ 1800.128884] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1800.129290] env[69784]: DEBUG nova.compute.manager [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1800.129470] env[69784]: DEBUG nova.compute.manager [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1800.129641] env[69784]: DEBUG nova.compute.manager [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1800.129804] env[69784]: DEBUG nova.network.neutron [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1800.663719] env[69784]: DEBUG nova.network.neutron [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1800.675019] env[69784]: INFO nova.compute.manager [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Took 0.54 seconds to deallocate network for instance. [ 1800.782602] env[69784]: INFO nova.scheduler.client.report [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Deleted allocations for instance 1473585c-f194-4396-b568-e8c1bc6d049b [ 1800.809045] env[69784]: DEBUG oslo_concurrency.lockutils [None req-ff5e1869-bed8-4582-9303-6907b47c87fd tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Lock "1473585c-f194-4396-b568-e8c1bc6d049b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 585.558s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1800.810356] env[69784]: DEBUG oslo_concurrency.lockutils [None req-58747662-4f50-4a3c-a26b-c0576d919e86 tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Lock "1473585c-f194-4396-b568-e8c1bc6d049b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 388.552s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1800.810511] env[69784]: DEBUG oslo_concurrency.lockutils [None req-58747662-4f50-4a3c-a26b-c0576d919e86 tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Acquiring lock "1473585c-f194-4396-b568-e8c1bc6d049b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1800.810739] env[69784]: DEBUG oslo_concurrency.lockutils [None req-58747662-4f50-4a3c-a26b-c0576d919e86 tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] 
Lock "1473585c-f194-4396-b568-e8c1bc6d049b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1800.810925] env[69784]: DEBUG oslo_concurrency.lockutils [None req-58747662-4f50-4a3c-a26b-c0576d919e86 tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Lock "1473585c-f194-4396-b568-e8c1bc6d049b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1800.812980] env[69784]: INFO nova.compute.manager [None req-58747662-4f50-4a3c-a26b-c0576d919e86 tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Terminating instance [ 1800.814613] env[69784]: DEBUG nova.compute.manager [None req-58747662-4f50-4a3c-a26b-c0576d919e86 tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1800.814804] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-58747662-4f50-4a3c-a26b-c0576d919e86 tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1800.815590] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4c93a736-4ee4-4fa8-b0d3-ab31fdb04df4 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.824387] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-394c3cd5-a0a3-4a42-96e9-fca6a75cfd48 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.838274] env[69784]: DEBUG nova.compute.manager [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1800.856562] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-58747662-4f50-4a3c-a26b-c0576d919e86 tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1473585c-f194-4396-b568-e8c1bc6d049b could not be found. 
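The termination path above keeps going even though the VM is already gone from the backend: the lookup raises InstanceNotFound, the driver logs a warning, and the destroy is treated as complete so terminate stays idempotent after the earlier failed build cleaned up the VM. A rough sketch of that tolerant-destroy shape, with InstanceNotFound, vm_lookup, unregister and delete_files as illustrative stand-ins rather than the actual vmops code:

class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""

def destroy_instance(vm_lookup, unregister, delete_files, log=print):
    # A missing backend VM is warned about, not fatal, matching the
    # WARNING entry above followed by "Instance destroyed".
    try:
        vm_ref = vm_lookup()
    except InstanceNotFound as exc:
        log("WARNING: Instance does not exist on backend: %s" % exc)
        return
    unregister(vm_ref)
    delete_files(vm_ref)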
[ 1800.856562] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-58747662-4f50-4a3c-a26b-c0576d919e86 tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1800.856706] env[69784]: INFO nova.compute.manager [None req-58747662-4f50-4a3c-a26b-c0576d919e86 tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1800.856946] env[69784]: DEBUG oslo.service.loopingcall [None req-58747662-4f50-4a3c-a26b-c0576d919e86 tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1800.857201] env[69784]: DEBUG nova.compute.manager [-] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1800.857522] env[69784]: DEBUG nova.network.neutron [-] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1800.887031] env[69784]: DEBUG nova.network.neutron [-] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1800.893231] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1800.893469] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1800.895071] env[69784]: INFO nova.compute.claims [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1800.897838] env[69784]: INFO nova.compute.manager [-] [instance: 1473585c-f194-4396-b568-e8c1bc6d049b] Took 0.04 seconds to deallocate network for instance. 
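The claim for instance 871e3b73-d4d8-4081-8b92-0dee212d8961 above is checked, under the compute_resources lock, against the same inventory the report client keeps printing: total, reserved and allocation_ratio per resource class. A simplified capacity check using the numbers from this log (VCPU 48 at ratio 4.0, MEMORY_MB 196590 with 512 reserved, DISK_GB 400; current usage from the final resource view; the request is the m1.nano allocation of 1 VCPU / 128 MB / 1 GB). This is only a sketch; the real placement check also enforces min_unit, max_unit and step_size:

INVENTORY = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

USED = {'VCPU': 10, 'MEMORY_MB': 1792, 'DISK_GB': 10}   # from the final resource view above
REQUEST = {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}   # the per-instance allocation in this log

def fits(inventory, used, request):
    # Capacity per class is (total - reserved) * allocation_ratio; the claim
    # succeeds only if every requested class stays under that ceiling.
    for rc, amount in request.items():
        inv = inventory[rc]
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        if used.get(rc, 0) + amount > capacity:
            return False
    return True

print(fits(INVENTORY, USED, REQUEST))   # True -> "Claim successful" as logged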
[ 1800.985659] env[69784]: DEBUG oslo_concurrency.lockutils [None req-58747662-4f50-4a3c-a26b-c0576d919e86 tempest-ServerAddressesNegativeTestJSON-156777135 tempest-ServerAddressesNegativeTestJSON-156777135-project-member] Lock "1473585c-f194-4396-b568-e8c1bc6d049b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.175s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1801.105118] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-532c4616-b365-403b-9339-ca22c67c415d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.113052] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea26b0cf-e60e-46de-a6d7-4c8bd0a36c2c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.142123] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-589525cd-5cd9-444f-89b3-be64b225f9a5 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.148692] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d58f235-4ad6-4794-9e0c-c22a590f35e6 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.161240] env[69784]: DEBUG nova.compute.provider_tree [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1801.170712] env[69784]: DEBUG nova.scheduler.client.report [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1801.184178] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.291s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1801.184652] env[69784]: DEBUG nova.compute.manager [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Start building networks asynchronously for instance. 
{{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1801.218388] env[69784]: DEBUG nova.compute.utils [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1801.219841] env[69784]: DEBUG nova.compute.manager [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1801.220022] env[69784]: DEBUG nova.network.neutron [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1801.227897] env[69784]: DEBUG nova.compute.manager [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1801.279371] env[69784]: DEBUG nova.policy [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ed425f078af549e9b7a61cf53066bb1e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2a9ca6c244dd4ce59b9938547e24c7db', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 1801.290405] env[69784]: DEBUG nova.compute.manager [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Start spawning the instance on the hypervisor. 
{{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1801.315983] env[69784]: DEBUG nova.virt.hardware [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1801.316255] env[69784]: DEBUG nova.virt.hardware [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1801.316412] env[69784]: DEBUG nova.virt.hardware [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1801.316588] env[69784]: DEBUG nova.virt.hardware [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1801.316734] env[69784]: DEBUG nova.virt.hardware [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1801.316898] env[69784]: DEBUG nova.virt.hardware [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1801.317189] env[69784]: DEBUG nova.virt.hardware [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1801.317358] env[69784]: DEBUG nova.virt.hardware [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1801.317526] env[69784]: DEBUG 
nova.virt.hardware [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1801.317691] env[69784]: DEBUG nova.virt.hardware [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1801.317864] env[69784]: DEBUG nova.virt.hardware [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1801.318749] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faa9e0f8-ea93-40b6-944c-8a3b54bafe4a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.326961] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f32c00c9-6971-4db3-bed6-93caf670ffdd {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.623010] env[69784]: DEBUG nova.network.neutron [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Successfully created port: f547ea0c-6684-43a3-a1ed-cdab4aebbec6 {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1802.295804] env[69784]: DEBUG nova.network.neutron [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Successfully updated port: f547ea0c-6684-43a3-a1ed-cdab4aebbec6 {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1802.304686] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Acquiring lock "refresh_cache-871e3b73-d4d8-4081-8b92-0dee212d8961" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1802.304839] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Acquired lock "refresh_cache-871e3b73-d4d8-4081-8b92-0dee212d8961" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1802.305068] env[69784]: DEBUG nova.network.neutron [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1802.348172] env[69784]: DEBUG nova.network.neutron [None 
req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Instance cache missing network info. {{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1802.542928] env[69784]: DEBUG nova.network.neutron [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Updating instance_info_cache with network_info: [{"id": "f547ea0c-6684-43a3-a1ed-cdab4aebbec6", "address": "fa:16:3e:8b:8b:b6", "network": {"id": "c5a63063-6b00-4337-986e-4580b1869bc6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-676037584-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a9ca6c244dd4ce59b9938547e24c7db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf547ea0c-66", "ovs_interfaceid": "f547ea0c-6684-43a3-a1ed-cdab4aebbec6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1802.554304] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Releasing lock "refresh_cache-871e3b73-d4d8-4081-8b92-0dee212d8961" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1802.554593] env[69784]: DEBUG nova.compute.manager [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Instance network_info: |[{"id": "f547ea0c-6684-43a3-a1ed-cdab4aebbec6", "address": "fa:16:3e:8b:8b:b6", "network": {"id": "c5a63063-6b00-4337-986e-4580b1869bc6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-676037584-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a9ca6c244dd4ce59b9938547e24c7db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf547ea0c-66", "ovs_interfaceid": 
"f547ea0c-6684-43a3-a1ed-cdab4aebbec6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1802.555009] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:8b:b6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e1a5c1-4ae7-409b-8de7-d401684ef60d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f547ea0c-6684-43a3-a1ed-cdab4aebbec6', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1802.563213] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Creating folder: Project (2a9ca6c244dd4ce59b9938547e24c7db). Parent ref: group-v692547. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1802.563740] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-32ca1cb4-6f09-49ac-9aa8-95defbc48da9 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.573716] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Created folder: Project (2a9ca6c244dd4ce59b9938547e24c7db) in parent group-v692547. [ 1802.573867] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Creating folder: Instances. Parent ref: group-v692645. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1802.574110] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-07134b18-336f-402f-b811-fa947e806697 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.581972] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Created folder: Instances in parent group-v692645. [ 1802.582218] env[69784]: DEBUG oslo.service.loopingcall [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1802.582400] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1802.582593] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7fb35d7a-2d4d-407b-9f6f-1595126ac2f9 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.602324] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1802.602324] env[69784]: value = "task-3467185" [ 1802.602324] env[69784]: _type = "Task" [ 1802.602324] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1802.609564] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467185, 'name': CreateVM_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.764294] env[69784]: DEBUG nova.compute.manager [req-df20f91e-460b-454d-b836-f3312277707b req-b28e7fb1-f556-4278-94a7-9b390a1a5d03 service nova] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Received event network-vif-plugged-f547ea0c-6684-43a3-a1ed-cdab4aebbec6 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1802.764574] env[69784]: DEBUG oslo_concurrency.lockutils [req-df20f91e-460b-454d-b836-f3312277707b req-b28e7fb1-f556-4278-94a7-9b390a1a5d03 service nova] Acquiring lock "871e3b73-d4d8-4081-8b92-0dee212d8961-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1802.764900] env[69784]: DEBUG oslo_concurrency.lockutils [req-df20f91e-460b-454d-b836-f3312277707b req-b28e7fb1-f556-4278-94a7-9b390a1a5d03 service nova] Lock "871e3b73-d4d8-4081-8b92-0dee212d8961-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1802.765118] env[69784]: DEBUG oslo_concurrency.lockutils [req-df20f91e-460b-454d-b836-f3312277707b req-b28e7fb1-f556-4278-94a7-9b390a1a5d03 service nova] Lock "871e3b73-d4d8-4081-8b92-0dee212d8961-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1802.765304] env[69784]: DEBUG nova.compute.manager [req-df20f91e-460b-454d-b836-f3312277707b req-b28e7fb1-f556-4278-94a7-9b390a1a5d03 service nova] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] No waiting events found dispatching network-vif-plugged-f547ea0c-6684-43a3-a1ed-cdab4aebbec6 {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1802.765476] env[69784]: WARNING nova.compute.manager [req-df20f91e-460b-454d-b836-f3312277707b req-b28e7fb1-f556-4278-94a7-9b390a1a5d03 service nova] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Received unexpected event network-vif-plugged-f547ea0c-6684-43a3-a1ed-cdab4aebbec6 for instance with vm_state building and task_state spawning. 
[ 1802.765643] env[69784]: DEBUG nova.compute.manager [req-df20f91e-460b-454d-b836-f3312277707b req-b28e7fb1-f556-4278-94a7-9b390a1a5d03 service nova] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Received event network-changed-f547ea0c-6684-43a3-a1ed-cdab4aebbec6 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1802.765797] env[69784]: DEBUG nova.compute.manager [req-df20f91e-460b-454d-b836-f3312277707b req-b28e7fb1-f556-4278-94a7-9b390a1a5d03 service nova] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Refreshing instance network info cache due to event network-changed-f547ea0c-6684-43a3-a1ed-cdab4aebbec6. {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1802.765999] env[69784]: DEBUG oslo_concurrency.lockutils [req-df20f91e-460b-454d-b836-f3312277707b req-b28e7fb1-f556-4278-94a7-9b390a1a5d03 service nova] Acquiring lock "refresh_cache-871e3b73-d4d8-4081-8b92-0dee212d8961" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1802.766158] env[69784]: DEBUG oslo_concurrency.lockutils [req-df20f91e-460b-454d-b836-f3312277707b req-b28e7fb1-f556-4278-94a7-9b390a1a5d03 service nova] Acquired lock "refresh_cache-871e3b73-d4d8-4081-8b92-0dee212d8961" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1802.766317] env[69784]: DEBUG nova.network.neutron [req-df20f91e-460b-454d-b836-f3312277707b req-b28e7fb1-f556-4278-94a7-9b390a1a5d03 service nova] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Refreshing network info cache for port f547ea0c-6684-43a3-a1ed-cdab4aebbec6 {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1803.100590] env[69784]: DEBUG nova.network.neutron [req-df20f91e-460b-454d-b836-f3312277707b req-b28e7fb1-f556-4278-94a7-9b390a1a5d03 service nova] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Updated VIF entry in instance network info cache for port f547ea0c-6684-43a3-a1ed-cdab4aebbec6. 
{{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1803.100947] env[69784]: DEBUG nova.network.neutron [req-df20f91e-460b-454d-b836-f3312277707b req-b28e7fb1-f556-4278-94a7-9b390a1a5d03 service nova] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Updating instance_info_cache with network_info: [{"id": "f547ea0c-6684-43a3-a1ed-cdab4aebbec6", "address": "fa:16:3e:8b:8b:b6", "network": {"id": "c5a63063-6b00-4337-986e-4580b1869bc6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-676037584-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a9ca6c244dd4ce59b9938547e24c7db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf547ea0c-66", "ovs_interfaceid": "f547ea0c-6684-43a3-a1ed-cdab4aebbec6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1803.111583] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467185, 'name': CreateVM_Task, 'duration_secs': 0.311856} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1803.112332] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1803.112793] env[69784]: DEBUG oslo_concurrency.lockutils [req-df20f91e-460b-454d-b836-f3312277707b req-b28e7fb1-f556-4278-94a7-9b390a1a5d03 service nova] Releasing lock "refresh_cache-871e3b73-d4d8-4081-8b92-0dee212d8961" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1803.115026] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1803.115026] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1803.115026] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" 
{{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1803.115026] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d32e238-248d-4f24-a6ea-c23e5e7b8e1d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.118871] env[69784]: DEBUG oslo_vmware.api [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Waiting for the task: (returnval){ [ 1803.118871] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]5284322e-fc82-d2a0-a2a2-ce18865246f0" [ 1803.118871] env[69784]: _type = "Task" [ 1803.118871] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1803.127014] env[69784]: DEBUG oslo_vmware.api [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]5284322e-fc82-d2a0-a2a2-ce18865246f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.631445] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1803.631759] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1803.631887] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1811.292207] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cdaa0d3c-3fba-4d5d-a4d8-5c645612f9d1 tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Acquiring lock "7632e563-1790-442f-9e13-77f3d93e4223" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1816.728562] env[69784]: DEBUG oslo_concurrency.lockutils [None req-3b5ee790-2cfa-444b-ae21-30161de2c96a tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Acquiring lock "871e3b73-d4d8-4081-8b92-0dee212d8961" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1821.840423] env[69784]: DEBUG 
oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1821.840701] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69784) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1823.842234] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1825.839998] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1825.839998] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1826.840368] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1828.840258] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1828.851612] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1828.851827] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1828.851991] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1828.852165] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69784) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1828.853274] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea3cd327-3363-46ca-a9b1-14555d32da64 {{(pid=69784) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.862294] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1eb0e7c-6a5d-4aa5-8cbb-2237338f171b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.876149] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42f0336f-6022-430b-86cc-11dbc3a749b0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.882535] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fb5df2e-294b-43f6-84c2-33a8b87b9feb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.912467] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180945MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=69784) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1828.912630] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1828.912818] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1829.074829] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 697cd7aa-d710-4e46-b241-085961a8631d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1829.074997] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 161991fb-77d5-4a18-b0f3-d2346c8d3b68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1829.075150] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1829.075272] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 93ea7e73-f280-4e22-9ac7-f1be9926a158 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1829.075391] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance db704361-31ad-49a0-8aa7-01d4e3f42a3d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1829.075509] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance a34a0620-ea85-4bd5-9690-c93d70ecb9ec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1829.075620] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1829.075736] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1829.075849] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 7632e563-1790-442f-9e13-77f3d93e4223 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1829.075961] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 871e3b73-d4d8-4081-8b92-0dee212d8961 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1829.089260] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ae4e1119-10e5-42fe-bb57-6bcb2c54d90b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1829.101204] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 30939594-471e-4cee-a7f3-2fa62023f897 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1829.111015] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 4479a824-1f93-45d0-953f-57736580d86f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1829.120392] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ec925fe0-8f7b-46c0-8d61-6a9cf989e798 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1829.120641] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1829.120793] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1829.141955] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Refreshing inventories for resource provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1829.157245] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Updating ProviderTree inventory for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1829.157465] env[69784]: DEBUG nova.compute.provider_tree [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Updating inventory in ProviderTree for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1829.170651] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Refreshing 
aggregate associations for resource provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3, aggregates: None {{(pid=69784) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1829.191919] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Refreshing trait associations for resource provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69784) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1829.344847] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-004d67ae-e39f-485b-a462-d7f4b9a05504 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.352359] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca25868e-d1dd-4069-a70b-bdf2464b3b5c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.383194] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-437e916a-13ce-4a74-b353-64618f337e5e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.390213] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01a994e3-f374-4006-9d4c-4acd35e63b22 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.403174] env[69784]: DEBUG nova.compute.provider_tree [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1829.411431] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1829.427265] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69784) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1829.427455] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.515s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1830.839595] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69784) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1830.839880] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Starting heal instance info cache {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1830.839922] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Rebuilding the list of instances to heal {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1830.860731] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1830.860967] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1830.861053] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1830.861167] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1830.861288] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1830.861406] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1830.861526] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1830.861646] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1830.861762] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Skipping network cache update for instance because it is Building. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1830.861878] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1830.861995] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Didn't find any instances for network info cache update. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1830.862534] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1830.862677] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Cleaning up deleted instances with incomplete migration {{(pid=69784) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11236}} [ 1830.869629] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1833.870413] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1834.839776] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1836.841356] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1836.841672] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Cleaning up deleted instances {{(pid=69784) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11198}} [ 1836.850446] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] There are 0 instances to clean {{(pid=69784) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11207}} [ 1837.845370] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1846.579651] env[69784]: WARNING oslo_vmware.rw_handles [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1846.579651] env[69784]: ERROR 
oslo_vmware.rw_handles Traceback (most recent call last): [ 1846.579651] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1846.579651] env[69784]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1846.579651] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1846.579651] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 1846.579651] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1846.579651] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1846.579651] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1846.579651] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1846.579651] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1846.579651] env[69784]: ERROR oslo_vmware.rw_handles [ 1846.580595] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/76731665-cdfc-48ca-a939-ccf02d4ec9bc/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1846.581964] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1846.582218] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Copying Virtual Disk [datastore1] vmware_temp/76731665-cdfc-48ca-a939-ccf02d4ec9bc/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] vmware_temp/76731665-cdfc-48ca-a939-ccf02d4ec9bc/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1846.582499] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-71a4c2ad-2db4-4ce3-b3ea-db8995e54e06 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.590384] env[69784]: DEBUG oslo_vmware.api [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Waiting for the task: (returnval){ [ 1846.590384] env[69784]: value = "task-3467186" [ 1846.590384] env[69784]: _type = "Task" [ 1846.590384] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.598187] env[69784]: DEBUG oslo_vmware.api [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Task: {'id': task-3467186, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.100979] env[69784]: DEBUG oslo_vmware.exceptions [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Fault InvalidArgument not matched. {{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1847.101220] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1847.101832] env[69784]: ERROR nova.compute.manager [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1847.101832] env[69784]: Faults: ['InvalidArgument'] [ 1847.101832] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Traceback (most recent call last): [ 1847.101832] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1847.101832] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] yield resources [ 1847.101832] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1847.101832] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] self.driver.spawn(context, instance, image_meta, [ 1847.101832] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1847.101832] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1847.101832] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1847.101832] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] self._fetch_image_if_missing(context, vi) [ 1847.101832] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1847.102356] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] image_cache(vi, tmp_image_ds_loc) [ 1847.102356] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1847.102356] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] vm_util.copy_virtual_disk( [ 1847.102356] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1847.102356] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] session._wait_for_task(vmdk_copy_task) [ 1847.102356] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1847.102356] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] return self.wait_for_task(task_ref) [ 1847.102356] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1847.102356] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] return evt.wait() [ 1847.102356] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1847.102356] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] result = hub.switch() [ 1847.102356] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1847.102356] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] return self.greenlet.switch() [ 1847.102772] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1847.102772] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] self.f(*self.args, **self.kw) [ 1847.102772] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1847.102772] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] raise exceptions.translate_fault(task_info.error) [ 1847.102772] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1847.102772] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Faults: ['InvalidArgument'] [ 1847.102772] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] [ 1847.102772] env[69784]: INFO nova.compute.manager [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Terminating instance [ 1847.103668] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1847.103877] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1847.104135] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4920eb4f-a627-40c6-9585-12c699e2641f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.106378] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Acquiring lock "refresh_cache-697cd7aa-d710-4e46-b241-085961a8631d" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1847.106541] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Acquired lock "refresh_cache-697cd7aa-d710-4e46-b241-085961a8631d" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1847.106705] env[69784]: DEBUG nova.network.neutron [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1847.113256] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1847.113425] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1847.114566] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f2baa05-7d3c-4ce9-9e8d-12e496aa20c0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.121463] env[69784]: DEBUG oslo_vmware.api [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Waiting for the task: (returnval){ [ 1847.121463] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]522aa863-b5dc-283e-48b5-941767bf51e6" [ 1847.121463] env[69784]: _type = "Task" [ 1847.121463] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1847.129334] env[69784]: DEBUG oslo_vmware.api [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]522aa863-b5dc-283e-48b5-941767bf51e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.134614] env[69784]: DEBUG nova.network.neutron [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Instance cache missing network info. {{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1847.198335] env[69784]: DEBUG nova.network.neutron [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1847.207420] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Releasing lock "refresh_cache-697cd7aa-d710-4e46-b241-085961a8631d" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1847.207813] env[69784]: DEBUG nova.compute.manager [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Start destroying the instance on the hypervisor. 
{{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1847.207999] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1847.209070] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-815faa8f-19c7-4f2e-b034-fa9b59b3307a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.217960] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1847.218207] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f1c6cd54-e3e5-475b-8afa-9828f088667d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.243323] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1847.243515] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1847.243693] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Deleting the datastore file [datastore1] 697cd7aa-d710-4e46-b241-085961a8631d {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1847.243935] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9bcb5c73-e4b8-426f-9eca-25b7e3bef57b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.249388] env[69784]: DEBUG oslo_vmware.api [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Waiting for the task: (returnval){ [ 1847.249388] env[69784]: value = "task-3467188" [ 1847.249388] env[69784]: _type = "Task" [ 1847.249388] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1847.256879] env[69784]: DEBUG oslo_vmware.api [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Task: {'id': task-3467188, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.633096] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1847.633096] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Creating directory with path [datastore1] vmware_temp/d40b97d3-3781-46bb-9b67-595377487267/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1847.633096] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5818b757-d026-4bec-a8c5-81829c62cf92 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.644035] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Created directory with path [datastore1] vmware_temp/d40b97d3-3781-46bb-9b67-595377487267/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1847.644225] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Fetch image to [datastore1] vmware_temp/d40b97d3-3781-46bb-9b67-595377487267/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1847.644391] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/d40b97d3-3781-46bb-9b67-595377487267/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1847.645075] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06e4b666-1923-47b9-9f1d-9f28cd527dd6 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.651387] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c3d106-39f8-4575-b83d-1de88f5cc6b5 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.659931] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2d7b9be-d865-43df-9a88-095641027550 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.689102] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5cb36a4f-5245-4f93-86c8-b76c458fd4c2 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.694135] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7ba87076-7e45-4701-9599-14a1929e1d06 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.712609] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1847.758587] env[69784]: DEBUG oslo_vmware.api [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Task: {'id': task-3467188, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.031073} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.758837] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1847.759026] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1847.759288] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1847.759467] env[69784]: INFO nova.compute.manager [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Took 0.55 seconds to destroy the instance on the hypervisor. [ 1847.759758] env[69784]: DEBUG oslo.service.loopingcall [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1847.759979] env[69784]: DEBUG nova.compute.manager [-] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Skipping network deallocation for instance since networking was not requested. 
{{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2259}} [ 1847.762143] env[69784]: DEBUG nova.compute.claims [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1847.762320] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1847.762529] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1847.835685] env[69784]: DEBUG oslo_vmware.rw_handles [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d40b97d3-3781-46bb-9b67-595377487267/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1847.896172] env[69784]: DEBUG oslo_vmware.rw_handles [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Completed reading data from the image iterator. {{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1847.896364] env[69784]: DEBUG oslo_vmware.rw_handles [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d40b97d3-3781-46bb-9b67-595377487267/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1848.020648] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fe2fc41-01f9-42a6-99d8-c91421e63a0e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.029355] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86162bfa-c607-4efc-b28d-7100a5fcfe03 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.057781] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35a4bca5-4f70-496b-9932-429b3310e284 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.064626] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5d457a5-2037-4c30-b645-0cad017a49d0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.077803] env[69784]: DEBUG nova.compute.provider_tree [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1848.086096] env[69784]: DEBUG nova.scheduler.client.report [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1848.099463] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.337s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1848.100029] env[69784]: ERROR nova.compute.manager [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1848.100029] env[69784]: Faults: ['InvalidArgument'] [ 1848.100029] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Traceback (most recent call last): [ 1848.100029] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1848.100029] env[69784]: ERROR nova.compute.manager [instance: 
697cd7aa-d710-4e46-b241-085961a8631d] self.driver.spawn(context, instance, image_meta, [ 1848.100029] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1848.100029] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1848.100029] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1848.100029] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] self._fetch_image_if_missing(context, vi) [ 1848.100029] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1848.100029] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] image_cache(vi, tmp_image_ds_loc) [ 1848.100029] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1848.100434] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] vm_util.copy_virtual_disk( [ 1848.100434] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1848.100434] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] session._wait_for_task(vmdk_copy_task) [ 1848.100434] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1848.100434] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] return self.wait_for_task(task_ref) [ 1848.100434] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1848.100434] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] return evt.wait() [ 1848.100434] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1848.100434] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] result = hub.switch() [ 1848.100434] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1848.100434] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] return self.greenlet.switch() [ 1848.100434] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1848.100434] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] self.f(*self.args, **self.kw) [ 1848.100830] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1848.100830] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] raise exceptions.translate_fault(task_info.error) [ 1848.100830] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1848.100830] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Faults: ['InvalidArgument'] [ 1848.100830] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] [ 1848.100830] env[69784]: DEBUG nova.compute.utils [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1848.102221] env[69784]: DEBUG nova.compute.manager [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Build of instance 697cd7aa-d710-4e46-b241-085961a8631d was re-scheduled: A specified parameter was not correct: fileType [ 1848.102221] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1848.102594] env[69784]: DEBUG nova.compute.manager [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1848.102811] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Acquiring lock "refresh_cache-697cd7aa-d710-4e46-b241-085961a8631d" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1848.102960] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Acquired lock "refresh_cache-697cd7aa-d710-4e46-b241-085961a8631d" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1848.103135] env[69784]: DEBUG nova.network.neutron [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1848.127237] env[69784]: DEBUG nova.network.neutron [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Instance cache missing network info. 
{{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1848.183032] env[69784]: DEBUG nova.network.neutron [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1848.191462] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Releasing lock "refresh_cache-697cd7aa-d710-4e46-b241-085961a8631d" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1848.191702] env[69784]: DEBUG nova.compute.manager [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1848.191908] env[69784]: DEBUG nova.compute.manager [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Skipping network deallocation for instance since networking was not requested. {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2259}} [ 1848.275877] env[69784]: INFO nova.scheduler.client.report [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Deleted allocations for instance 697cd7aa-d710-4e46-b241-085961a8631d [ 1848.297071] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5429b66a-d07d-4d1a-878a-875828021b1d tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Lock "697cd7aa-d710-4e46-b241-085961a8631d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 631.054s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1848.298250] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c99ed6cd-b041-4b5e-b846-7e23ee4c777b tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Lock "697cd7aa-d710-4e46-b241-085961a8631d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 435.626s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1848.298662] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c99ed6cd-b041-4b5e-b846-7e23ee4c777b tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Acquiring lock "697cd7aa-d710-4e46-b241-085961a8631d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1848.298872] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c99ed6cd-b041-4b5e-b846-7e23ee4c777b tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Lock "697cd7aa-d710-4e46-b241-085961a8631d-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1848.299053] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c99ed6cd-b041-4b5e-b846-7e23ee4c777b tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Lock "697cd7aa-d710-4e46-b241-085961a8631d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1848.301012] env[69784]: INFO nova.compute.manager [None req-c99ed6cd-b041-4b5e-b846-7e23ee4c777b tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Terminating instance [ 1848.302570] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c99ed6cd-b041-4b5e-b846-7e23ee4c777b tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Acquiring lock "refresh_cache-697cd7aa-d710-4e46-b241-085961a8631d" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1848.302725] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c99ed6cd-b041-4b5e-b846-7e23ee4c777b tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Acquired lock "refresh_cache-697cd7aa-d710-4e46-b241-085961a8631d" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1848.302891] env[69784]: DEBUG nova.network.neutron [None req-c99ed6cd-b041-4b5e-b846-7e23ee4c777b tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1848.308230] env[69784]: DEBUG nova.compute.manager [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1848.326327] env[69784]: DEBUG nova.network.neutron [None req-c99ed6cd-b041-4b5e-b846-7e23ee4c777b tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Instance cache missing network info. 
{{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1848.357308] env[69784]: DEBUG oslo_concurrency.lockutils [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1848.357308] env[69784]: DEBUG oslo_concurrency.lockutils [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1848.357963] env[69784]: INFO nova.compute.claims [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1848.383467] env[69784]: DEBUG nova.network.neutron [None req-c99ed6cd-b041-4b5e-b846-7e23ee4c777b tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1848.391569] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c99ed6cd-b041-4b5e-b846-7e23ee4c777b tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Releasing lock "refresh_cache-697cd7aa-d710-4e46-b241-085961a8631d" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1848.391915] env[69784]: DEBUG nova.compute.manager [None req-c99ed6cd-b041-4b5e-b846-7e23ee4c777b tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Start destroying the instance on the hypervisor. 
{{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1848.392238] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-c99ed6cd-b041-4b5e-b846-7e23ee4c777b tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1848.392863] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-81b0c99b-9c2f-416e-b73c-8c7fc1c1044c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.405261] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dea68d2-f2b0-4ac6-a898-b6e1afda51e6 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.436036] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-c99ed6cd-b041-4b5e-b846-7e23ee4c777b tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 697cd7aa-d710-4e46-b241-085961a8631d could not be found. [ 1848.436255] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-c99ed6cd-b041-4b5e-b846-7e23ee4c777b tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1848.436425] env[69784]: INFO nova.compute.manager [None req-c99ed6cd-b041-4b5e-b846-7e23ee4c777b tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1848.436652] env[69784]: DEBUG oslo.service.loopingcall [None req-c99ed6cd-b041-4b5e-b846-7e23ee4c777b tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1848.438968] env[69784]: DEBUG nova.compute.manager [-] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1848.439092] env[69784]: DEBUG nova.network.neutron [-] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1848.547342] env[69784]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=69784) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1848.547587] env[69784]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1848.548174] env[69784]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1848.548174] env[69784]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1848.548174] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1848.548174] env[69784]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1848.548174] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1848.548174] env[69784]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1848.548174] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1848.548174] env[69784]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1848.548174] env[69784]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1848.548174] env[69784]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-8d7c493d-2541-4c1a-9b19-246402081e34'] [ 1848.548174] env[69784]: ERROR oslo.service.loopingcall [ 1848.548174] env[69784]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1848.548174] env[69784]: ERROR oslo.service.loopingcall [ 1848.548174] env[69784]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1848.548174] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1848.548174] env[69784]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1848.548758] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1848.548758] env[69784]: ERROR oslo.service.loopingcall result = f(*args, 
**kwargs) [ 1848.548758] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 1848.548758] env[69784]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1848.548758] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1848.548758] env[69784]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1848.548758] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1848.548758] env[69784]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1848.548758] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1848.548758] env[69784]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1848.548758] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1848.548758] env[69784]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1848.548758] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1848.548758] env[69784]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1848.548758] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1848.548758] env[69784]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1848.548758] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1848.548758] env[69784]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1848.549221] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1848.549221] env[69784]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1848.549221] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1848.549221] env[69784]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1848.549221] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1848.549221] env[69784]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1848.549221] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1848.549221] env[69784]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1848.549221] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1848.549221] env[69784]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1848.549221] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1848.549221] env[69784]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1848.549221] env[69784]: ERROR oslo.service.loopingcall File 
"/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1848.549221] env[69784]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1848.549221] env[69784]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1848.549221] env[69784]: ERROR oslo.service.loopingcall [ 1848.549620] env[69784]: ERROR nova.compute.manager [None req-c99ed6cd-b041-4b5e-b846-7e23ee4c777b tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1848.582942] env[69784]: ERROR nova.compute.manager [None req-c99ed6cd-b041-4b5e-b846-7e23ee4c777b tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1848.582942] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Traceback (most recent call last): [ 1848.582942] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1848.582942] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] ret = obj(*args, **kwargs) [ 1848.582942] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1848.582942] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] exception_handler_v20(status_code, error_body) [ 1848.582942] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1848.582942] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] raise client_exc(message=error_message, [ 1848.582942] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1848.582942] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Neutron server returns request_ids: ['req-8d7c493d-2541-4c1a-9b19-246402081e34'] [ 1848.582942] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] [ 1848.583439] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] During handling of the above exception, another exception occurred: [ 1848.583439] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] [ 1848.583439] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Traceback (most recent call last): [ 1848.583439] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File 
"/opt/stack/nova/nova/compute/manager.py", line 3315, in do_terminate_instance [ 1848.583439] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] self._delete_instance(context, instance, bdms) [ 1848.583439] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/nova/nova/compute/manager.py", line 3250, in _delete_instance [ 1848.583439] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] self._shutdown_instance(context, instance, bdms) [ 1848.583439] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/nova/nova/compute/manager.py", line 3144, in _shutdown_instance [ 1848.583439] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] self._try_deallocate_network(context, instance, requested_networks) [ 1848.583439] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/nova/nova/compute/manager.py", line 3058, in _try_deallocate_network [ 1848.583439] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] with excutils.save_and_reraise_exception(): [ 1848.583439] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1848.583439] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] self.force_reraise() [ 1848.583861] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1848.583861] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] raise self.value [ 1848.583861] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/nova/nova/compute/manager.py", line 3056, in _try_deallocate_network [ 1848.583861] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] _deallocate_network_with_retries() [ 1848.583861] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1848.583861] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] return evt.wait() [ 1848.583861] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1848.583861] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] result = hub.switch() [ 1848.583861] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1848.583861] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] return self.greenlet.switch() [ 1848.583861] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1848.583861] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] result = 
func(*self.args, **self.kw) [ 1848.584244] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1848.584244] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] result = f(*args, **kwargs) [ 1848.584244] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 1848.584244] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] self._deallocate_network( [ 1848.584244] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1848.584244] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] self.network_api.deallocate_for_instance( [ 1848.584244] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1848.584244] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] data = neutron.list_ports(**search_opts) [ 1848.584244] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1848.584244] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] ret = obj(*args, **kwargs) [ 1848.584244] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1848.584244] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] return self.list('ports', self.ports_path, retrieve_all, [ 1848.584244] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1848.584695] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] ret = obj(*args, **kwargs) [ 1848.584695] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1848.584695] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] for r in self._pagination(collection, path, **params): [ 1848.584695] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1848.584695] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] res = self.get(path, params=params) [ 1848.584695] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1848.584695] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] ret = obj(*args, **kwargs) [ 1848.584695] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 
356, in get [ 1848.584695] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] return self.retry_request("GET", action, body=body, [ 1848.584695] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1848.584695] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] ret = obj(*args, **kwargs) [ 1848.584695] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1848.584695] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] return self.do_request(method, action, body=body, [ 1848.585127] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1848.585127] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] ret = obj(*args, **kwargs) [ 1848.585127] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1848.585127] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] self._handle_fault_response(status_code, replybody, resp) [ 1848.585127] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1848.585127] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1848.585127] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1848.585127] env[69784]: ERROR nova.compute.manager [instance: 697cd7aa-d710-4e46-b241-085961a8631d] [ 1848.589227] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dca9a24e-8d56-4161-99e1-87a74abb3991 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.596677] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e6d2812-5c3b-412c-ae40-b5c70e2d7b13 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.629347] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3522ad2d-a130-45e3-8f1c-dc7c8fef3905 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.636945] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c0e71aa-5245-4d69-9140-603dd2fe4ce9 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.641999] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c99ed6cd-b041-4b5e-b846-7e23ee4c777b tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Lock "697cd7aa-d710-4e46-b241-085961a8631d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.344s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1848.654860] env[69784]: DEBUG nova.compute.provider_tree [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1848.669016] env[69784]: DEBUG nova.scheduler.client.report [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1848.678867] env[69784]: DEBUG oslo_concurrency.lockutils [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.322s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1848.679388] env[69784]: DEBUG nova.compute.manager [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Start building 
networks asynchronously for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1848.700060] env[69784]: INFO nova.compute.manager [None req-c99ed6cd-b041-4b5e-b846-7e23ee4c777b tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] [instance: 697cd7aa-d710-4e46-b241-085961a8631d] Successfully reverted task state from None on failure for instance. [ 1848.703427] env[69784]: ERROR oslo_messaging.rpc.server [None req-c99ed6cd-b041-4b5e-b846-7e23ee4c777b tempest-ServerShowV254Test-375904905 tempest-ServerShowV254Test-375904905-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1848.703427] env[69784]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1848.703427] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1848.703427] env[69784]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1848.703427] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1848.703427] env[69784]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1848.703427] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1848.703427] env[69784]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1848.703427] env[69784]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1848.703427] env[69784]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-8d7c493d-2541-4c1a-9b19-246402081e34'] [ 1848.703427] env[69784]: ERROR oslo_messaging.rpc.server [ 1848.703427] env[69784]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1848.703427] env[69784]: ERROR oslo_messaging.rpc.server [ 1848.703427] env[69784]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1848.703427] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1848.703427] env[69784]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1848.703977] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1848.703977] env[69784]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1848.703977] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1848.703977] env[69784]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1848.703977] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1848.703977] env[69784]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1848.703977] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", 
line 227, in __exit__ [ 1848.703977] env[69784]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1848.703977] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1848.703977] env[69784]: ERROR oslo_messaging.rpc.server raise self.value [ 1848.703977] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1848.703977] env[69784]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1848.703977] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1848.703977] env[69784]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1848.703977] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1848.703977] env[69784]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1848.703977] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1848.703977] env[69784]: ERROR oslo_messaging.rpc.server raise self.value [ 1848.704524] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1848.704524] env[69784]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1848.704524] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1439, in decorated_function [ 1848.704524] env[69784]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1848.704524] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1848.704524] env[69784]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1848.704524] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1848.704524] env[69784]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1848.704524] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1848.704524] env[69784]: ERROR oslo_messaging.rpc.server raise self.value [ 1848.704524] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1848.704524] env[69784]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1848.704524] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3327, in terminate_instance [ 1848.704524] env[69784]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1848.704524] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 414, in inner [ 1848.704524] env[69784]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1848.704524] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3322, in do_terminate_instance [ 1848.704524] env[69784]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1848.705067] env[69784]: ERROR oslo_messaging.rpc.server 
File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1848.705067] env[69784]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1848.705067] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1848.705067] env[69784]: ERROR oslo_messaging.rpc.server raise self.value [ 1848.705067] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3315, in do_terminate_instance [ 1848.705067] env[69784]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1848.705067] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3250, in _delete_instance [ 1848.705067] env[69784]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1848.705067] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3144, in _shutdown_instance [ 1848.705067] env[69784]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1848.705067] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3058, in _try_deallocate_network [ 1848.705067] env[69784]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1848.705067] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1848.705067] env[69784]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1848.705067] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1848.705067] env[69784]: ERROR oslo_messaging.rpc.server raise self.value [ 1848.705067] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3056, in _try_deallocate_network [ 1848.705067] env[69784]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1848.705642] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1848.705642] env[69784]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1848.705642] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1848.705642] env[69784]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1848.705642] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1848.705642] env[69784]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1848.705642] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1848.705642] env[69784]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1848.705642] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1848.705642] env[69784]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1848.705642] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 
1848.705642] env[69784]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1848.705642] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1848.705642] env[69784]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1848.705642] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1848.705642] env[69784]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1848.705642] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1848.705642] env[69784]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1848.706215] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1848.706215] env[69784]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1848.706215] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1848.706215] env[69784]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1848.706215] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1848.706215] env[69784]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1848.706215] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1848.706215] env[69784]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1848.706215] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1848.706215] env[69784]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1848.706215] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1848.706215] env[69784]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1848.706215] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1848.706215] env[69784]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1848.706215] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1848.706215] env[69784]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1848.706215] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1848.706215] env[69784]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1848.706787] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1848.706787] env[69784]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1848.706787] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1848.706787] env[69784]: ERROR oslo_messaging.rpc.server raise 
exception.NeutronAdminCredentialConfigurationInvalid() [ 1848.706787] env[69784]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1848.706787] env[69784]: ERROR oslo_messaging.rpc.server [ 1848.710692] env[69784]: DEBUG nova.compute.utils [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1848.711865] env[69784]: DEBUG nova.compute.manager [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1848.712061] env[69784]: DEBUG nova.network.neutron [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1848.719397] env[69784]: DEBUG nova.compute.manager [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1848.775986] env[69784]: DEBUG nova.policy [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5471b3d40f9e45d499d0c878832f6b4a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5304678ef076486db412f8fb1dd50af8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 1848.779085] env[69784]: DEBUG nova.compute.manager [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Start spawning the instance on the hypervisor. 
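Both tracebacks above end in the same place: Nova's neutron-client wrapper turns a python-neutronclient Unauthorized (the HTTP 401 shown in the rpc.server traceback) into NeutronAdminCredentialConfigurationInvalid, so the delete fails with a configuration-oriented error rather than a raw 401. The following is a minimal, hedged sketch of that translation pattern, written from the wrapper frames visible in the traceback (nova/network/neutron.py lines 196 and 212); it is illustrative, not the actual Nova source.

import functools

from neutronclient.common import exceptions as neutron_client_exc

from nova import exception


def translate_neutron_auth_errors(func):
    """Illustrative decorator: re-raise a Neutron 401 as a Nova config error."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except neutron_client_exc.Unauthorized:
            # A 401 raised while Nova talks to Neutron with the service
            # credentials from the [neutron] section of nova.conf points at
            # broken credentials or auth URL, not at the end user's token.
            raise exception.NeutronAdminCredentialConfigurationInvalid()
    return wrapper


# Usage (hypothetical call site): wrap each neutronclient method, e.g.
# list_ports = translate_neutron_auth_errors(neutron.list_ports)

In practice an error like the one logged here is usually cleared up by fixing the [neutron] auth options (auth_url, username, password, project) that nova-compute uses, rather than anything on the instance itself.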
{{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1848.803254] env[69784]: DEBUG nova.virt.hardware [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1848.803504] env[69784]: DEBUG nova.virt.hardware [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1848.803661] env[69784]: DEBUG nova.virt.hardware [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1848.803841] env[69784]: DEBUG nova.virt.hardware [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1848.803988] env[69784]: DEBUG nova.virt.hardware [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1848.804150] env[69784]: DEBUG nova.virt.hardware [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1848.804362] env[69784]: DEBUG nova.virt.hardware [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1848.804529] env[69784]: DEBUG nova.virt.hardware [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Build topologies for 1 vcpu(s) 1:1:1 
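The CPU-topology lines around this point walk through how Nova picks a guest topology when neither flavor nor image sets any constraints (the "limits 0:0:0" / "pref 0:0:0" entries): every (sockets, cores, threads) combination whose product equals the vCPU count is a candidate, and for the 1-vCPU m1.nano flavor that leaves only 1x1x1, as the next entries confirm. A self-contained illustration of that enumeration follows; it sketches the idea, not Nova's _get_possible_cpu_topologies itself.

import itertools


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Yield (sockets, cores, threads) combinations that exactly cover vcpus."""
    for sockets, cores, threads in itertools.product(
            range(1, min(max_sockets, vcpus) + 1),
            range(1, min(max_cores, vcpus) + 1),
            range(1, min(max_threads, vcpus) + 1)):
        if sockets * cores * threads == vcpus:
            yield sockets, cores, threads


# For the 1-vCPU flavor in this log there is exactly one possibility:
print(list(possible_topologies(1)))   # [(1, 1, 1)]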
{{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1848.804696] env[69784]: DEBUG nova.virt.hardware [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1848.804857] env[69784]: DEBUG nova.virt.hardware [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1848.805036] env[69784]: DEBUG nova.virt.hardware [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1848.805868] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb8f448d-e6e5-447d-a5eb-828b1adc1b74 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.813670] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a1be743-e6e7-402a-9ccc-4180221a1fd9 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.141878] env[69784]: DEBUG nova.network.neutron [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Successfully created port: 47d7a441-d972-4dc7-b547-08c26ce51fda {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1849.751703] env[69784]: DEBUG nova.compute.manager [req-4d382b0d-9d9d-491c-8033-27215999d263 req-abaf59e5-bc6b-4bf2-a494-589ab728d47a service nova] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Received event network-vif-plugged-47d7a441-d972-4dc7-b547-08c26ce51fda {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1849.751703] env[69784]: DEBUG oslo_concurrency.lockutils [req-4d382b0d-9d9d-491c-8033-27215999d263 req-abaf59e5-bc6b-4bf2-a494-589ab728d47a service nova] Acquiring lock "ae4e1119-10e5-42fe-bb57-6bcb2c54d90b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1849.751703] env[69784]: DEBUG oslo_concurrency.lockutils [req-4d382b0d-9d9d-491c-8033-27215999d263 req-abaf59e5-bc6b-4bf2-a494-589ab728d47a service nova] Lock "ae4e1119-10e5-42fe-bb57-6bcb2c54d90b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1849.751703] env[69784]: DEBUG oslo_concurrency.lockutils [req-4d382b0d-9d9d-491c-8033-27215999d263 req-abaf59e5-bc6b-4bf2-a494-589ab728d47a service nova] Lock "ae4e1119-10e5-42fe-bb57-6bcb2c54d90b-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1849.752532] env[69784]: DEBUG nova.compute.manager [req-4d382b0d-9d9d-491c-8033-27215999d263 req-abaf59e5-bc6b-4bf2-a494-589ab728d47a service nova] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] No waiting events found dispatching network-vif-plugged-47d7a441-d972-4dc7-b547-08c26ce51fda {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1849.752532] env[69784]: WARNING nova.compute.manager [req-4d382b0d-9d9d-491c-8033-27215999d263 req-abaf59e5-bc6b-4bf2-a494-589ab728d47a service nova] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Received unexpected event network-vif-plugged-47d7a441-d972-4dc7-b547-08c26ce51fda for instance with vm_state building and task_state spawning. [ 1849.859207] env[69784]: DEBUG nova.network.neutron [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Successfully updated port: 47d7a441-d972-4dc7-b547-08c26ce51fda {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1849.873256] env[69784]: DEBUG oslo_concurrency.lockutils [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Acquiring lock "refresh_cache-ae4e1119-10e5-42fe-bb57-6bcb2c54d90b" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1849.873413] env[69784]: DEBUG oslo_concurrency.lockutils [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Acquired lock "refresh_cache-ae4e1119-10e5-42fe-bb57-6bcb2c54d90b" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1849.873566] env[69784]: DEBUG nova.network.neutron [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1849.934182] env[69784]: DEBUG nova.network.neutron [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Instance cache missing network info. 
{{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1850.322125] env[69784]: DEBUG nova.network.neutron [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Updating instance_info_cache with network_info: [{"id": "47d7a441-d972-4dc7-b547-08c26ce51fda", "address": "fa:16:3e:7e:9c:dc", "network": {"id": "5aefb596-86e5-47b1-8255-1e57f9ae3ff0", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-959946745-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5304678ef076486db412f8fb1dd50af8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd0eb882-ab95-4373-aa20-ee565a9919e3", "external-id": "nsx-vlan-transportzone-510", "segmentation_id": 510, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47d7a441-d9", "ovs_interfaceid": "47d7a441-d972-4dc7-b547-08c26ce51fda", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1850.332177] env[69784]: DEBUG oslo_concurrency.lockutils [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Releasing lock "refresh_cache-ae4e1119-10e5-42fe-bb57-6bcb2c54d90b" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1850.332480] env[69784]: DEBUG nova.compute.manager [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Instance network_info: |[{"id": "47d7a441-d972-4dc7-b547-08c26ce51fda", "address": "fa:16:3e:7e:9c:dc", "network": {"id": "5aefb596-86e5-47b1-8255-1e57f9ae3ff0", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-959946745-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5304678ef076486db412f8fb1dd50af8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd0eb882-ab95-4373-aa20-ee565a9919e3", "external-id": "nsx-vlan-transportzone-510", "segmentation_id": 510, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47d7a441-d9", "ovs_interfaceid": "47d7a441-d972-4dc7-b547-08c26ce51fda", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}]| {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1850.332883] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7e:9c:dc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fd0eb882-ab95-4373-aa20-ee565a9919e3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '47d7a441-d972-4dc7-b547-08c26ce51fda', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1850.340474] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Creating folder: Project (5304678ef076486db412f8fb1dd50af8). Parent ref: group-v692547. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1850.341037] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-56a34ac0-9cd8-40ed-96a7-59228a65f442 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.352051] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Created folder: Project (5304678ef076486db412f8fb1dd50af8) in parent group-v692547. [ 1850.352262] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Creating folder: Instances. Parent ref: group-v692648. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1850.352495] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-340e48aa-29fc-48d4-9831-f5c254c99768 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.360514] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Created folder: Instances in parent group-v692648. [ 1850.360734] env[69784]: DEBUG oslo.service.loopingcall [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1850.360980] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1850.361202] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6e707580-6722-4852-8718-422e1d9926cf {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.379500] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1850.379500] env[69784]: value = "task-3467191" [ 1850.379500] env[69784]: _type = "Task" [ 1850.379500] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1850.386853] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467191, 'name': CreateVM_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1850.890406] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467191, 'name': CreateVM_Task, 'duration_secs': 0.297193} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1850.890703] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1850.891353] env[69784]: DEBUG oslo_concurrency.lockutils [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1850.891526] env[69784]: DEBUG oslo_concurrency.lockutils [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1850.891852] env[69784]: DEBUG oslo_concurrency.lockutils [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1850.892271] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2bf49f0-89eb-4c7d-81eb-5ea2e2fdaded {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.896471] env[69784]: DEBUG oslo_vmware.api [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Waiting for the task: (returnval){ [ 1850.896471] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]526bbea1-4536-1120-29b9-35967feabf27" [ 1850.896471] env[69784]: _type = "Task" 
[ 1850.896471] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1850.903916] env[69784]: DEBUG oslo_vmware.api [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]526bbea1-4536-1120-29b9-35967feabf27, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.406763] env[69784]: DEBUG oslo_concurrency.lockutils [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1851.407105] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1851.407336] env[69784]: DEBUG oslo_concurrency.lockutils [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1851.773784] env[69784]: DEBUG nova.compute.manager [req-11c3fe19-2683-4a3b-b79e-1792bae3ba40 req-f8a1c76c-e6c4-4718-b57b-88b3331fbdd2 service nova] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Received event network-changed-47d7a441-d972-4dc7-b547-08c26ce51fda {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1851.773984] env[69784]: DEBUG nova.compute.manager [req-11c3fe19-2683-4a3b-b79e-1792bae3ba40 req-f8a1c76c-e6c4-4718-b57b-88b3331fbdd2 service nova] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Refreshing instance network info cache due to event network-changed-47d7a441-d972-4dc7-b547-08c26ce51fda. 
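Two different things in this section run through oslo_service/loopingcall.py: the _deallocate_network_with_retries helper in the tracebacks above, and the vSphere task waits just above ("progress is 0%" followed by "completed successfully" for CreateVM_Task and SearchDatastore_Task). Both follow the same pattern: wrap a callable in a looping call, start it on an interval, and block on .wait() until the callable signals completion. A self-contained sketch of that pattern with an illustrative poll function (the interval and return value are made up for the example):

from oslo_service import loopingcall

state = {'polls': 0}


def _poll_task():
    """Pretend to poll a long-running task; finish on the third check."""
    state['polls'] += 1
    if state['polls'] >= 3:
        # Stops the loop; the value becomes the return of .wait() below.
        raise loopingcall.LoopingCallDone(retvalue='success')


timer = loopingcall.FixedIntervalLoopingCall(_poll_task)
result = timer.start(interval=0.1).wait()   # blocks the calling greenthread
print(result)   # 'success'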
{{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1851.774211] env[69784]: DEBUG oslo_concurrency.lockutils [req-11c3fe19-2683-4a3b-b79e-1792bae3ba40 req-f8a1c76c-e6c4-4718-b57b-88b3331fbdd2 service nova] Acquiring lock "refresh_cache-ae4e1119-10e5-42fe-bb57-6bcb2c54d90b" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1851.774355] env[69784]: DEBUG oslo_concurrency.lockutils [req-11c3fe19-2683-4a3b-b79e-1792bae3ba40 req-f8a1c76c-e6c4-4718-b57b-88b3331fbdd2 service nova] Acquired lock "refresh_cache-ae4e1119-10e5-42fe-bb57-6bcb2c54d90b" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1851.774517] env[69784]: DEBUG nova.network.neutron [req-11c3fe19-2683-4a3b-b79e-1792bae3ba40 req-f8a1c76c-e6c4-4718-b57b-88b3331fbdd2 service nova] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Refreshing network info cache for port 47d7a441-d972-4dc7-b547-08c26ce51fda {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1852.093209] env[69784]: DEBUG nova.network.neutron [req-11c3fe19-2683-4a3b-b79e-1792bae3ba40 req-f8a1c76c-e6c4-4718-b57b-88b3331fbdd2 service nova] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Updated VIF entry in instance network info cache for port 47d7a441-d972-4dc7-b547-08c26ce51fda. {{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1852.093209] env[69784]: DEBUG nova.network.neutron [req-11c3fe19-2683-4a3b-b79e-1792bae3ba40 req-f8a1c76c-e6c4-4718-b57b-88b3331fbdd2 service nova] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Updating instance_info_cache with network_info: [{"id": "47d7a441-d972-4dc7-b547-08c26ce51fda", "address": "fa:16:3e:7e:9c:dc", "network": {"id": "5aefb596-86e5-47b1-8255-1e57f9ae3ff0", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-959946745-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5304678ef076486db412f8fb1dd50af8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd0eb882-ab95-4373-aa20-ee565a9919e3", "external-id": "nsx-vlan-transportzone-510", "segmentation_id": 510, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47d7a441-d9", "ovs_interfaceid": "47d7a441-d972-4dc7-b547-08c26ce51fda", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1852.102249] env[69784]: DEBUG oslo_concurrency.lockutils [req-11c3fe19-2683-4a3b-b79e-1792bae3ba40 req-f8a1c76c-e6c4-4718-b57b-88b3331fbdd2 service nova] Releasing lock "refresh_cache-ae4e1119-10e5-42fe-bb57-6bcb2c54d90b" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1860.155189] env[69784]: DEBUG oslo_concurrency.lockutils [None req-3734dffc-08fc-48d7-92ee-316e693d4475 tempest-InstanceActionsNegativeTestJSON-317807868 
tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Acquiring lock "ae4e1119-10e5-42fe-bb57-6bcb2c54d90b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1865.072880] env[69784]: DEBUG oslo_concurrency.lockutils [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Acquiring lock "c78438d5-ddaa-4858-a161-af83e6c16e54" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1865.073251] env[69784]: DEBUG oslo_concurrency.lockutils [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Lock "c78438d5-ddaa-4858-a161-af83e6c16e54" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1877.846066] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._sync_power_states {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1877.867246] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Getting list of instances from cluster (obj){ [ 1877.867246] env[69784]: value = "domain-c8" [ 1877.867246] env[69784]: _type = "ClusterComputeResource" [ 1877.867246] env[69784]: } {{(pid=69784) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1877.868503] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-944466e2-c353-4a92-9f5b-2253a759f126 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.886893] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Got total of 10 instances {{(pid=69784) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1877.887026] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Triggering sync for uuid 161991fb-77d5-4a18-b0f3-d2346c8d3b68 {{(pid=69784) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1877.887248] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Triggering sync for uuid 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe {{(pid=69784) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1877.887438] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Triggering sync for uuid 93ea7e73-f280-4e22-9ac7-f1be9926a158 {{(pid=69784) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1877.887590] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Triggering sync for uuid db704361-31ad-49a0-8aa7-01d4e3f42a3d {{(pid=69784) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1877.887736] env[69784]: DEBUG nova.compute.manager [None 
req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Triggering sync for uuid a34a0620-ea85-4bd5-9690-c93d70ecb9ec {{(pid=69784) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1877.887883] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Triggering sync for uuid 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d {{(pid=69784) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1877.888044] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Triggering sync for uuid ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07 {{(pid=69784) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1877.888200] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Triggering sync for uuid 7632e563-1790-442f-9e13-77f3d93e4223 {{(pid=69784) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1877.888349] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Triggering sync for uuid 871e3b73-d4d8-4081-8b92-0dee212d8961 {{(pid=69784) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1877.888493] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Triggering sync for uuid ae4e1119-10e5-42fe-bb57-6bcb2c54d90b {{(pid=69784) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1877.888987] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "161991fb-77d5-4a18-b0f3-d2346c8d3b68" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1877.889090] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "6cdbefb3-90e0-4ea5-b41f-a094723dbdbe" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1877.889227] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "93ea7e73-f280-4e22-9ac7-f1be9926a158" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1877.889484] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "db704361-31ad-49a0-8aa7-01d4e3f42a3d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1877.889693] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "a34a0620-ea85-4bd5-9690-c93d70ecb9ec" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1877.889901] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "0d0e7d04-847c-486f-8cb1-b2b3afe33f2d" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1877.890139] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1877.890357] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "7632e563-1790-442f-9e13-77f3d93e4223" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1877.890550] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "871e3b73-d4d8-4081-8b92-0dee212d8961" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1877.890741] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "ae4e1119-10e5-42fe-bb57-6bcb2c54d90b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1881.839845] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1881.840274] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69784) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1885.840596] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1885.840887] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1886.840744] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1886.840744] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1888.840292] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1888.854016] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1888.854252] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1888.854420] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1888.854574] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69784) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1888.856055] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78477592-3d1f-4497-8bc9-108ed628845c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.864365] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76f29019-3a26-4466-a5c6-9384efea6d71 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.877921] env[69784]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44376290-3d9a-4253-81eb-2c7d72df9a62 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.884014] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-687a09a6-26f0-45a1-ba95-8f64b4d5fc34 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.912830] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180956MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=69784) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1888.912993] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1888.913199] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1888.991414] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 161991fb-77d5-4a18-b0f3-d2346c8d3b68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1888.991575] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1888.991704] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 93ea7e73-f280-4e22-9ac7-f1be9926a158 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1888.991826] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance db704361-31ad-49a0-8aa7-01d4e3f42a3d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1888.992178] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance a34a0620-ea85-4bd5-9690-c93d70ecb9ec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1888.992178] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1888.992178] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1888.992324] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 7632e563-1790-442f-9e13-77f3d93e4223 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1888.992401] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 871e3b73-d4d8-4081-8b92-0dee212d8961 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1888.992548] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ae4e1119-10e5-42fe-bb57-6bcb2c54d90b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1889.003919] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 30939594-471e-4cee-a7f3-2fa62023f897 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1889.014579] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 4479a824-1f93-45d0-953f-57736580d86f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1889.027477] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ec925fe0-8f7b-46c0-8d61-6a9cf989e798 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1889.037439] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c78438d5-ddaa-4858-a161-af83e6c16e54 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1889.037661] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1889.037807] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1889.211208] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73331cf5-b691-49a3-803b-7eb11df8a2f6 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.218783] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81a6b650-ba3b-4e37-88e5-77797be509ef {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.248296] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d2a4ea3-ec07-401b-8d74-247ac5709aa8 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.255400] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7854e12a-1d0e-420d-a2a8-a91298f87afe {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.268424] env[69784]: DEBUG nova.compute.provider_tree [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1889.276693] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1889.289980] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69784) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1889.290232] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.377s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1889.306892] env[69784]: DEBUG oslo_concurrency.lockutils [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Acquiring lock "0a177681-5f4e-4dc5-baee-1303be38444a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1889.307087] env[69784]: DEBUG oslo_concurrency.lockutils [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Lock "0a177681-5f4e-4dc5-baee-1303be38444a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1892.291056] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1892.291056] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Starting heal instance info cache {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1892.291056] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Rebuilding the list of instances to heal {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1892.312707] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1892.312925] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1892.313115] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1892.313287] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Skipping network cache update for instance because it is Building. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1892.313423] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1892.313546] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1892.313666] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1892.313786] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1892.313904] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1892.314033] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1892.314161] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Didn't find any instances for network info cache update. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1893.859432] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1895.839581] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1895.939958] env[69784]: WARNING oslo_vmware.rw_handles [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1895.939958] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1895.939958] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1895.939958] env[69784]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1895.939958] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1895.939958] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 1895.939958] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1895.939958] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1895.939958] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1895.939958] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1895.939958] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1895.939958] env[69784]: ERROR oslo_vmware.rw_handles [ 1895.939958] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/d40b97d3-3781-46bb-9b67-595377487267/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1895.943272] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1895.943272] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Copying Virtual Disk [datastore1] vmware_temp/d40b97d3-3781-46bb-9b67-595377487267/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] 
vmware_temp/d40b97d3-3781-46bb-9b67-595377487267/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1895.943546] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7047ca2a-593a-4256-9dc9-b726928eb82f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.951286] env[69784]: DEBUG oslo_vmware.api [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Waiting for the task: (returnval){ [ 1895.951286] env[69784]: value = "task-3467192" [ 1895.951286] env[69784]: _type = "Task" [ 1895.951286] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.960113] env[69784]: DEBUG oslo_vmware.api [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Task: {'id': task-3467192, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.461707] env[69784]: DEBUG oslo_vmware.exceptions [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Fault InvalidArgument not matched. {{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1896.461969] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1896.462644] env[69784]: ERROR nova.compute.manager [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1896.462644] env[69784]: Faults: ['InvalidArgument'] [ 1896.462644] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Traceback (most recent call last): [ 1896.462644] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1896.462644] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] yield resources [ 1896.462644] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1896.462644] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] self.driver.spawn(context, instance, image_meta, [ 1896.462644] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1896.462644] env[69784]: ERROR 
nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1896.462644] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1896.462644] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] self._fetch_image_if_missing(context, vi) [ 1896.462644] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1896.463034] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] image_cache(vi, tmp_image_ds_loc) [ 1896.463034] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1896.463034] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] vm_util.copy_virtual_disk( [ 1896.463034] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1896.463034] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] session._wait_for_task(vmdk_copy_task) [ 1896.463034] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1896.463034] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] return self.wait_for_task(task_ref) [ 1896.463034] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1896.463034] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] return evt.wait() [ 1896.463034] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1896.463034] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] result = hub.switch() [ 1896.463034] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1896.463034] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] return self.greenlet.switch() [ 1896.463431] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1896.463431] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] self.f(*self.args, **self.kw) [ 1896.463431] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1896.463431] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] raise exceptions.translate_fault(task_info.error) [ 1896.463431] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1896.463431] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Faults: ['InvalidArgument'] [ 1896.463431] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] [ 1896.463431] env[69784]: INFO nova.compute.manager [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Terminating instance [ 1896.464550] env[69784]: DEBUG oslo_concurrency.lockutils [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1896.464787] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1896.465006] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-adeb6735-8f84-4e35-b5ef-2d054c0d535e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.467282] env[69784]: DEBUG nova.compute.manager [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Start destroying the instance on the hypervisor. 
{{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1896.467481] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1896.468266] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77377d15-fc72-49a0-9707-296b7265c48a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.475224] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1896.475496] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6f6f345a-00d2-45e4-949f-4123b9ad15e2 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.477825] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1896.477998] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1896.479150] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2e017aa-2b8a-45e0-8868-e9ae34e5979c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.485244] env[69784]: DEBUG oslo_vmware.api [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Waiting for the task: (returnval){ [ 1896.485244] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52aadd17-e631-1694-8d36-4653ceefb0fe" [ 1896.485244] env[69784]: _type = "Task" [ 1896.485244] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1896.491808] env[69784]: DEBUG oslo_vmware.api [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52aadd17-e631-1694-8d36-4653ceefb0fe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.545985] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1896.546245] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1896.546562] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Deleting the datastore file [datastore1] 161991fb-77d5-4a18-b0f3-d2346c8d3b68 {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1896.547012] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-58960c22-c629-44f7-aeae-edbcd7735263 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.553916] env[69784]: DEBUG oslo_vmware.api [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Waiting for the task: (returnval){ [ 1896.553916] env[69784]: value = "task-3467194" [ 1896.553916] env[69784]: _type = "Task" [ 1896.553916] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1896.561731] env[69784]: DEBUG oslo_vmware.api [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Task: {'id': task-3467194, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.994916] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1896.995217] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Creating directory with path [datastore1] vmware_temp/6458f7a4-4e74-4fa3-9a05-ad6bf7c18e25/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1896.995431] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fefc29ce-62c7-44cd-bf8e-3be88f379f70 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.006494] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Created directory with path [datastore1] vmware_temp/6458f7a4-4e74-4fa3-9a05-ad6bf7c18e25/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1897.006674] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Fetch image to [datastore1] vmware_temp/6458f7a4-4e74-4fa3-9a05-ad6bf7c18e25/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1897.006840] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/6458f7a4-4e74-4fa3-9a05-ad6bf7c18e25/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1897.007552] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d03725e-1d72-4d3b-aafe-3aa0a1686bff {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.013766] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1230d958-ed2c-440e-8c3d-d3301dc7814c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.022493] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb3f7e7a-070b-4a67-b0cc-b141d1dcdffd {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.052565] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-807da985-cb3a-40ad-b20f-bf630f96e561 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.062804] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e696baa9-006b-4395-9bfe-d5f19aaed6af {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.064401] env[69784]: DEBUG oslo_vmware.api [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Task: {'id': task-3467194, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078152} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.064629] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1897.064797] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1897.064964] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1897.065151] env[69784]: INFO nova.compute.manager [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1897.067212] env[69784]: DEBUG nova.compute.claims [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1897.067383] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1897.067592] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1897.085572] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1897.280171] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a858339-b670-4c74-bd40-4dc14c19cb9e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.287876] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b7da777-6619-4c7b-8859-ec61d1ebdd6c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.316883] env[69784]: DEBUG oslo_concurrency.lockutils [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1897.317667] env[69784]: ERROR nova.compute.manager [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image a83f2316-67d7-4612-bb03-1146b6453ed4. 
[ 1897.317667] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Traceback (most recent call last): [ 1897.317667] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1897.317667] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1897.317667] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1897.317667] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] result = getattr(controller, method)(*args, **kwargs) [ 1897.317667] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1897.317667] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] return self._get(image_id) [ 1897.317667] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1897.317667] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1897.317667] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1897.318082] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] resp, body = self.http_client.get(url, headers=header) [ 1897.318082] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 1897.318082] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] return self.request(url, 'GET', **kwargs) [ 1897.318082] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1897.318082] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] return self._handle_response(resp) [ 1897.318082] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1897.318082] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] raise exc.from_response(resp, resp.content) [ 1897.318082] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1897.318082] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] [ 1897.318082] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] During handling of the above exception, another exception occurred: [ 1897.318082] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] [ 1897.318082] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Traceback (most recent call last): [ 1897.318437] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1897.318437] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] yield resources [ 1897.318437] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1897.318437] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] self.driver.spawn(context, instance, image_meta, [ 1897.318437] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1897.318437] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1897.318437] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1897.318437] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] self._fetch_image_if_missing(context, vi) [ 1897.318437] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1897.318437] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] image_fetch(context, vi, tmp_image_ds_loc) [ 1897.318437] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1897.318437] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] images.fetch_image( [ 1897.318437] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1897.318871] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] metadata = IMAGE_API.get(context, image_ref) [ 1897.318871] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/image/glance.py", line 1205, in get [ 1897.318871] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] return session.show(context, image_id, [ 1897.318871] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1897.318871] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] _reraise_translated_image_exception(image_id) [ 1897.318871] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File 
"/opt/stack/nova/nova/image/glance.py", line 1031, in _reraise_translated_image_exception [ 1897.318871] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] raise new_exc.with_traceback(exc_trace) [ 1897.318871] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1897.318871] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1897.318871] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1897.318871] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] result = getattr(controller, method)(*args, **kwargs) [ 1897.318871] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1897.318871] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] return self._get(image_id) [ 1897.319262] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1897.319262] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1897.319262] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1897.319262] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] resp, body = self.http_client.get(url, headers=header) [ 1897.319262] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 1897.319262] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] return self.request(url, 'GET', **kwargs) [ 1897.319262] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1897.319262] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] return self._handle_response(resp) [ 1897.319262] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1897.319262] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] raise exc.from_response(resp, resp.content) [ 1897.319262] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] nova.exception.ImageNotAuthorized: Not authorized for image a83f2316-67d7-4612-bb03-1146b6453ed4. 
[ 1897.319262] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] [ 1897.319575] env[69784]: INFO nova.compute.manager [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Terminating instance [ 1897.319925] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe952b37-feb5-4fb0-8055-4a478c9b1e23 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.322268] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1897.322466] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1897.323074] env[69784]: DEBUG nova.compute.manager [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1897.323263] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1897.323478] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d2ab622d-a124-4afc-b4be-fa1534ec381b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.325656] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58faa76e-c15f-4f13-b508-226d23b16a59 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.332942] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51197d05-6630-4823-8858-7b934d3bd61c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.338722] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1897.338912] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Folder 
[datastore1] devstack-image-cache_base created. {{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1897.340091] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7010800f-df59-4da6-adb5-a7e853216583 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.351761] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1897.352183] env[69784]: DEBUG nova.compute.provider_tree [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1897.353483] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-50091841-acf4-4fb2-a1c0-158df85a8221 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.355933] env[69784]: DEBUG oslo_vmware.api [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Waiting for the task: (returnval){ [ 1897.355933] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]523391c1-442b-d770-e771-dae32bdd49ef" [ 1897.355933] env[69784]: _type = "Task" [ 1897.355933] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.360222] env[69784]: DEBUG nova.scheduler.client.report [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1897.366128] env[69784]: DEBUG oslo_vmware.api [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]523391c1-442b-d770-e771-dae32bdd49ef, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.375108] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.307s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1897.375683] env[69784]: ERROR nova.compute.manager [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1897.375683] env[69784]: Faults: ['InvalidArgument'] [ 1897.375683] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Traceback (most recent call last): [ 1897.375683] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1897.375683] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] self.driver.spawn(context, instance, image_meta, [ 1897.375683] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1897.375683] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1897.375683] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1897.375683] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] self._fetch_image_if_missing(context, vi) [ 1897.375683] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1897.375683] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] image_cache(vi, tmp_image_ds_loc) [ 1897.375683] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1897.376031] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] vm_util.copy_virtual_disk( [ 1897.376031] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1897.376031] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] session._wait_for_task(vmdk_copy_task) [ 1897.376031] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1897.376031] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] return self.wait_for_task(task_ref) [ 1897.376031] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1897.376031] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] return evt.wait() [ 1897.376031] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1897.376031] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] result = hub.switch() [ 1897.376031] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1897.376031] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] return self.greenlet.switch() [ 1897.376031] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1897.376031] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] self.f(*self.args, **self.kw) [ 1897.376367] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1897.376367] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] raise exceptions.translate_fault(task_info.error) [ 1897.376367] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1897.376367] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Faults: ['InvalidArgument'] [ 1897.376367] env[69784]: ERROR nova.compute.manager [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] [ 1897.376500] env[69784]: DEBUG nova.compute.utils [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1897.377778] env[69784]: DEBUG nova.compute.manager [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Build of instance 161991fb-77d5-4a18-b0f3-d2346c8d3b68 was re-scheduled: A specified parameter was not correct: fileType [ 1897.377778] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1897.378161] env[69784]: DEBUG nova.compute.manager [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1897.378354] env[69784]: DEBUG nova.compute.manager [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be 
unplugged. {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1897.378531] env[69784]: DEBUG nova.compute.manager [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1897.378695] env[69784]: DEBUG nova.network.neutron [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1897.423325] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1897.423536] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1897.423705] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Deleting the datastore file [datastore1] 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1897.423970] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4c3de7f0-a827-4861-972c-ebfda38c6fcd {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.430645] env[69784]: DEBUG oslo_vmware.api [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Waiting for the task: (returnval){ [ 1897.430645] env[69784]: value = "task-3467196" [ 1897.430645] env[69784]: _type = "Task" [ 1897.430645] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.437759] env[69784]: DEBUG oslo_vmware.api [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Task: {'id': task-3467196, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.870753] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1897.870887] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Creating directory with path [datastore1] vmware_temp/ae1aaa5e-1e49-4b20-a04f-4f406b9e90cc/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1897.871162] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bb33fd1f-f8dd-42be-a9b6-ebe0f9cee156 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.883101] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Created directory with path [datastore1] vmware_temp/ae1aaa5e-1e49-4b20-a04f-4f406b9e90cc/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1897.883101] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Fetch image to [datastore1] vmware_temp/ae1aaa5e-1e49-4b20-a04f-4f406b9e90cc/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1897.883101] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/ae1aaa5e-1e49-4b20-a04f-4f406b9e90cc/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1897.883808] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae340edc-a995-4b1c-a7a8-dfdd2cd452a3 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.892488] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f4fec44-8542-48b3-b35e-678bee9401c0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.896964] env[69784]: DEBUG nova.network.neutron [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1897.905640] env[69784]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf942efc-c0c2-4702-936a-0acb223c2c4f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.910375] env[69784]: INFO nova.compute.manager [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Took 0.53 seconds to deallocate network for instance. [ 1897.943845] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faff33fb-9740-422c-b580-7274041ae9c6 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.957765] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-2cff7dbf-5d66-4dc5-a47f-ff3597870ddf {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.959597] env[69784]: DEBUG oslo_vmware.api [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Task: {'id': task-3467196, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076663} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.960167] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1897.960285] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1897.960470] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1897.960638] env[69784]: INFO nova.compute.manager [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Took 0.64 seconds to destroy the instance on the hypervisor. 
[ 1897.963028] env[69784]: DEBUG nova.compute.claims [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1897.963193] env[69784]: DEBUG oslo_concurrency.lockutils [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1897.963406] env[69784]: DEBUG oslo_concurrency.lockutils [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1897.980484] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1898.011361] env[69784]: INFO nova.scheduler.client.report [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Deleted allocations for instance 161991fb-77d5-4a18-b0f3-d2346c8d3b68 [ 1898.036842] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cb97b9e8-ef05-49aa-a2e2-bd3880ebaa86 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Lock "161991fb-77d5-4a18-b0f3-d2346c8d3b68" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 680.233s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1898.038017] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e20bfd17-3224-46ea-8a5c-ab11eb6a6493 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Lock "161991fb-77d5-4a18-b0f3-d2346c8d3b68" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 484.614s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1898.038753] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e20bfd17-3224-46ea-8a5c-ab11eb6a6493 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Acquiring lock "161991fb-77d5-4a18-b0f3-d2346c8d3b68-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1898.038753] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e20bfd17-3224-46ea-8a5c-ab11eb6a6493 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Lock 
"161991fb-77d5-4a18-b0f3-d2346c8d3b68-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1898.038753] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e20bfd17-3224-46ea-8a5c-ab11eb6a6493 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Lock "161991fb-77d5-4a18-b0f3-d2346c8d3b68-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1898.043962] env[69784]: INFO nova.compute.manager [None req-e20bfd17-3224-46ea-8a5c-ab11eb6a6493 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Terminating instance [ 1898.045684] env[69784]: DEBUG nova.compute.manager [None req-e20bfd17-3224-46ea-8a5c-ab11eb6a6493 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1898.045921] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-e20bfd17-3224-46ea-8a5c-ab11eb6a6493 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1898.046353] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f853afed-5c01-4744-98c9-c45cbc05ad09 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.049936] env[69784]: DEBUG oslo_vmware.rw_handles [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ae1aaa5e-1e49-4b20-a04f-4f406b9e90cc/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1898.111285] env[69784]: DEBUG nova.compute.manager [None req-f8142916-9abd-406a-96c3-b913ea7fc064 tempest-ServersTestMultiNic-248893282 tempest-ServersTestMultiNic-248893282-project-member] [instance: 30939594-471e-4cee-a7f3-2fa62023f897] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1898.118540] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8bfcb66-ca81-4963-9f92-5eabd2722097 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.131737] env[69784]: DEBUG oslo_vmware.rw_handles [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Completed reading data from the image iterator. 
{{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1898.131913] env[69784]: DEBUG oslo_vmware.rw_handles [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ae1aaa5e-1e49-4b20-a04f-4f406b9e90cc/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1898.136896] env[69784]: DEBUG nova.compute.manager [None req-f8142916-9abd-406a-96c3-b913ea7fc064 tempest-ServersTestMultiNic-248893282 tempest-ServersTestMultiNic-248893282-project-member] [instance: 30939594-471e-4cee-a7f3-2fa62023f897] Instance disappeared before build. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1898.151881] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-e20bfd17-3224-46ea-8a5c-ab11eb6a6493 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 161991fb-77d5-4a18-b0f3-d2346c8d3b68 could not be found. [ 1898.152096] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-e20bfd17-3224-46ea-8a5c-ab11eb6a6493 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1898.152278] env[69784]: INFO nova.compute.manager [None req-e20bfd17-3224-46ea-8a5c-ab11eb6a6493 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Took 0.11 seconds to destroy the instance on the hypervisor. [ 1898.152525] env[69784]: DEBUG oslo.service.loopingcall [None req-e20bfd17-3224-46ea-8a5c-ab11eb6a6493 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1898.156726] env[69784]: DEBUG nova.compute.manager [-] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1898.156835] env[69784]: DEBUG nova.network.neutron [-] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1898.168264] env[69784]: DEBUG oslo_concurrency.lockutils [None req-f8142916-9abd-406a-96c3-b913ea7fc064 tempest-ServersTestMultiNic-248893282 tempest-ServersTestMultiNic-248893282-project-member] Lock "30939594-471e-4cee-a7f3-2fa62023f897" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.242s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1898.177625] env[69784]: DEBUG nova.compute.manager [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1898.181371] env[69784]: DEBUG nova.network.neutron [-] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1898.189039] env[69784]: INFO nova.compute.manager [-] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] Took 0.03 seconds to deallocate network for instance. [ 1898.240210] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1898.269238] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61eda360-a87e-4c46-bd1e-877cdc4e5e5a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.279060] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ee8e5b3-96e8-4595-8a06-9d0e8728c29d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.308832] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b001090-000e-4f10-8fd0-6f112f07c9df {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.316810] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e20bfd17-3224-46ea-8a5c-ab11eb6a6493 tempest-AttachInterfacesV270Test-881782710 tempest-AttachInterfacesV270Test-881782710-project-member] Lock "161991fb-77d5-4a18-b0f3-d2346c8d3b68" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.276s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1898.316810] env[69784]: DEBUG oslo_concurrency.lockutils [None 
req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "161991fb-77d5-4a18-b0f3-d2346c8d3b68" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 20.426s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1898.316810] env[69784]: INFO nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 161991fb-77d5-4a18-b0f3-d2346c8d3b68] During sync_power_state the instance has a pending task (deleting). Skip. [ 1898.316810] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "161991fb-77d5-4a18-b0f3-d2346c8d3b68" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1898.319072] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddb9d27b-6896-45dc-b31f-2c028b4ec9d9 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.332885] env[69784]: DEBUG nova.compute.provider_tree [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1898.340383] env[69784]: DEBUG nova.scheduler.client.report [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1898.352615] env[69784]: DEBUG oslo_concurrency.lockutils [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.389s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1898.353320] env[69784]: ERROR nova.compute.manager [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image a83f2316-67d7-4612-bb03-1146b6453ed4. 
[ 1898.353320] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Traceback (most recent call last): [ 1898.353320] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1898.353320] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1898.353320] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1898.353320] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] result = getattr(controller, method)(*args, **kwargs) [ 1898.353320] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1898.353320] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] return self._get(image_id) [ 1898.353320] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1898.353320] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1898.353320] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1898.353684] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] resp, body = self.http_client.get(url, headers=header) [ 1898.353684] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 1898.353684] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] return self.request(url, 'GET', **kwargs) [ 1898.353684] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1898.353684] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] return self._handle_response(resp) [ 1898.353684] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1898.353684] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] raise exc.from_response(resp, resp.content) [ 1898.353684] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1898.353684] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] [ 1898.353684] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] During handling of the above exception, another exception occurred: [ 1898.353684] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] [ 1898.353684] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Traceback (most recent call last): [ 1898.354051] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1898.354051] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] self.driver.spawn(context, instance, image_meta, [ 1898.354051] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1898.354051] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1898.354051] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1898.354051] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] self._fetch_image_if_missing(context, vi) [ 1898.354051] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1898.354051] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] image_fetch(context, vi, tmp_image_ds_loc) [ 1898.354051] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1898.354051] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] images.fetch_image( [ 1898.354051] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1898.354051] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] metadata = IMAGE_API.get(context, image_ref) [ 1898.354051] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/image/glance.py", line 1205, in get [ 1898.354453] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] return session.show(context, image_id, [ 1898.354453] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1898.354453] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] _reraise_translated_image_exception(image_id) [ 1898.354453] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/image/glance.py", line 1031, in _reraise_translated_image_exception [ 1898.354453] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] raise new_exc.with_traceback(exc_trace) [ 1898.354453] env[69784]: ERROR nova.compute.manager [instance: 
6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1898.354453] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1898.354453] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1898.354453] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] result = getattr(controller, method)(*args, **kwargs) [ 1898.354453] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1898.354453] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] return self._get(image_id) [ 1898.354453] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1898.354453] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1898.354837] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1898.354837] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] resp, body = self.http_client.get(url, headers=header) [ 1898.354837] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 1898.354837] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] return self.request(url, 'GET', **kwargs) [ 1898.354837] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1898.354837] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] return self._handle_response(resp) [ 1898.354837] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1898.354837] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] raise exc.from_response(resp, resp.content) [ 1898.354837] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] nova.exception.ImageNotAuthorized: Not authorized for image a83f2316-67d7-4612-bb03-1146b6453ed4. [ 1898.354837] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] [ 1898.355159] env[69784]: DEBUG nova.compute.utils [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Not authorized for image a83f2316-67d7-4612-bb03-1146b6453ed4. 
{{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1898.355159] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.115s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1898.356338] env[69784]: INFO nova.compute.claims [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1898.359013] env[69784]: DEBUG nova.compute.manager [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Build of instance 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe was re-scheduled: Not authorized for image a83f2316-67d7-4612-bb03-1146b6453ed4. {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1898.359479] env[69784]: DEBUG nova.compute.manager [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1898.359648] env[69784]: DEBUG nova.compute.manager [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1898.359802] env[69784]: DEBUG nova.compute.manager [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1898.359961] env[69784]: DEBUG nova.network.neutron [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1898.497130] env[69784]: DEBUG neutronclient.v2_0.client [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=69784) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1898.498970] env[69784]: ERROR nova.compute.manager [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. [ 1898.498970] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Traceback (most recent call last): [ 1898.498970] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1898.498970] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1898.498970] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1898.498970] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] result = getattr(controller, method)(*args, **kwargs) [ 1898.498970] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1898.498970] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] return self._get(image_id) [ 1898.498970] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1898.498970] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1898.498970] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1898.499469] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] resp, body = self.http_client.get(url, headers=header) [ 1898.499469] env[69784]: ERROR nova.compute.manager [instance: 
6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 1898.499469] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] return self.request(url, 'GET', **kwargs) [ 1898.499469] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1898.499469] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] return self._handle_response(resp) [ 1898.499469] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1898.499469] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] raise exc.from_response(resp, resp.content) [ 1898.499469] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1898.499469] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] [ 1898.499469] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] During handling of the above exception, another exception occurred: [ 1898.499469] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] [ 1898.499469] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Traceback (most recent call last): [ 1898.499815] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1898.499815] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] self.driver.spawn(context, instance, image_meta, [ 1898.499815] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1898.499815] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1898.499815] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1898.499815] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] self._fetch_image_if_missing(context, vi) [ 1898.499815] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1898.499815] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] image_fetch(context, vi, tmp_image_ds_loc) [ 1898.499815] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1898.499815] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] 
images.fetch_image( [ 1898.499815] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1898.499815] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] metadata = IMAGE_API.get(context, image_ref) [ 1898.499815] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/image/glance.py", line 1205, in get [ 1898.500225] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] return session.show(context, image_id, [ 1898.500225] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1898.500225] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] _reraise_translated_image_exception(image_id) [ 1898.500225] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/image/glance.py", line 1031, in _reraise_translated_image_exception [ 1898.500225] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] raise new_exc.with_traceback(exc_trace) [ 1898.500225] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1898.500225] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1898.500225] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1898.500225] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] result = getattr(controller, method)(*args, **kwargs) [ 1898.500225] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1898.500225] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] return self._get(image_id) [ 1898.500225] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1898.500225] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1898.500615] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1898.500615] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] resp, body = self.http_client.get(url, headers=header) [ 1898.500615] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 1898.500615] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] return self.request(url, 'GET', **kwargs) [ 1898.500615] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1898.500615] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] return self._handle_response(resp) [ 1898.500615] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1898.500615] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] raise exc.from_response(resp, resp.content) [ 1898.500615] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] nova.exception.ImageNotAuthorized: Not authorized for image a83f2316-67d7-4612-bb03-1146b6453ed4. [ 1898.500615] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] [ 1898.500615] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] During handling of the above exception, another exception occurred: [ 1898.500615] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] [ 1898.500615] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Traceback (most recent call last): [ 1898.501079] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/compute/manager.py", line 2430, in _do_build_and_run_instance [ 1898.501079] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] self._build_and_run_instance(context, instance, image, [ 1898.501079] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/compute/manager.py", line 2722, in _build_and_run_instance [ 1898.501079] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] raise exception.RescheduledException( [ 1898.501079] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] nova.exception.RescheduledException: Build of instance 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe was re-scheduled: Not authorized for image a83f2316-67d7-4612-bb03-1146b6453ed4. 
[ 1898.501079] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] [ 1898.501079] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] During handling of the above exception, another exception occurred: [ 1898.501079] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] [ 1898.501079] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Traceback (most recent call last): [ 1898.501079] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1898.501079] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] ret = obj(*args, **kwargs) [ 1898.501079] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1898.501079] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] exception_handler_v20(status_code, error_body) [ 1898.501483] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1898.501483] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] raise client_exc(message=error_message, [ 1898.501483] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1898.501483] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Neutron server returns request_ids: ['req-f80e2740-85d8-4f2a-9ec2-63ba0b55d979'] [ 1898.501483] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] [ 1898.501483] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] During handling of the above exception, another exception occurred: [ 1898.501483] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] [ 1898.501483] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Traceback (most recent call last): [ 1898.501483] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/compute/manager.py", line 3019, in _cleanup_allocated_networks [ 1898.501483] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] self._deallocate_network(context, instance, requested_networks) [ 1898.501483] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1898.501483] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] self.network_api.deallocate_for_instance( [ 1898.501483] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1898.501998] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] data = neutron.list_ports(**search_opts) [ 
1898.501998] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1898.501998] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] ret = obj(*args, **kwargs) [ 1898.501998] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1898.501998] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] return self.list('ports', self.ports_path, retrieve_all, [ 1898.501998] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1898.501998] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] ret = obj(*args, **kwargs) [ 1898.501998] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1898.501998] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] for r in self._pagination(collection, path, **params): [ 1898.501998] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1898.501998] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] res = self.get(path, params=params) [ 1898.501998] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1898.501998] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] ret = obj(*args, **kwargs) [ 1898.502443] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1898.502443] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] return self.retry_request("GET", action, body=body, [ 1898.502443] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1898.502443] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] ret = obj(*args, **kwargs) [ 1898.502443] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1898.502443] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] return self.do_request(method, action, body=body, [ 1898.502443] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1898.502443] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] ret = obj(*args, **kwargs) [ 1898.502443] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 
1898.502443] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] self._handle_fault_response(status_code, replybody, resp) [ 1898.502443] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1898.502443] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] raise exception.Unauthorized() [ 1898.502443] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] nova.exception.Unauthorized: Not authorized. [ 1898.502871] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] [ 1898.550200] env[69784]: INFO nova.scheduler.client.report [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Deleted allocations for instance 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe [ 1898.566717] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76cb75c3-5917-4687-9511-62596b80f0e2 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.569891] env[69784]: DEBUG oslo_concurrency.lockutils [None req-754343fb-d5f3-4ed9-b0eb-35090ff3f248 tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Lock "6cdbefb3-90e0-4ea5-b41f-a094723dbdbe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 632.301s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1898.571356] env[69784]: DEBUG oslo_concurrency.lockutils [None req-6d04f637-d369-47c2-85c4-e387b47ef2ec tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Lock "6cdbefb3-90e0-4ea5-b41f-a094723dbdbe" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 435.336s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1898.571575] env[69784]: DEBUG oslo_concurrency.lockutils [None req-6d04f637-d369-47c2-85c4-e387b47ef2ec tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Acquiring lock "6cdbefb3-90e0-4ea5-b41f-a094723dbdbe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1898.571778] env[69784]: DEBUG oslo_concurrency.lockutils [None req-6d04f637-d369-47c2-85c4-e387b47ef2ec tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Lock "6cdbefb3-90e0-4ea5-b41f-a094723dbdbe-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1898.571946] env[69784]: DEBUG oslo_concurrency.lockutils [None req-6d04f637-d369-47c2-85c4-e387b47ef2ec tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Lock "6cdbefb3-90e0-4ea5-b41f-a094723dbdbe-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69784)
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1898.576071] env[69784]: INFO nova.compute.manager [None req-6d04f637-d369-47c2-85c4-e387b47ef2ec tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Terminating instance [ 1898.577973] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-072b085a-1705-407b-936b-9db660ca7022 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.581575] env[69784]: DEBUG nova.compute.manager [None req-6d04f637-d369-47c2-85c4-e387b47ef2ec tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1898.581777] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-6d04f637-d369-47c2-85c4-e387b47ef2ec tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1898.582239] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dba1b59f-dc95-4e08-896a-84a2a94fb483 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.616997] env[69784]: DEBUG nova.compute.manager [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1898.623295] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8b591fe-a959-4b07-9836-b7a0de8a49a4 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.641984] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed42d2dd-51af-4d18-8d62-98a5fa367272 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.645214] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d5f7bfe-2008-4001-947a-9583de0d7306 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.657631] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-6d04f637-d369-47c2-85c4-e387b47ef2ec tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe could not be found. 
[ 1898.657839] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-6d04f637-d369-47c2-85c4-e387b47ef2ec tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1898.658033] env[69784]: INFO nova.compute.manager [None req-6d04f637-d369-47c2-85c4-e387b47ef2ec tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Took 0.08 seconds to destroy the instance on the hypervisor. [ 1898.658317] env[69784]: DEBUG oslo.service.loopingcall [None req-6d04f637-d369-47c2-85c4-e387b47ef2ec tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1898.658888] env[69784]: DEBUG nova.compute.manager [-] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1898.658982] env[69784]: DEBUG nova.network.neutron [-] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1898.668600] env[69784]: DEBUG nova.compute.provider_tree [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1898.680072] env[69784]: DEBUG nova.scheduler.client.report [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1898.689095] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1898.692488] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.338s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1898.692938] env[69784]: DEBUG nova.compute.manager [None
req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Start building networks asynchronously for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1898.696054] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.006s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1898.697294] env[69784]: INFO nova.compute.claims [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1898.726339] env[69784]: DEBUG nova.compute.utils [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1898.727837] env[69784]: DEBUG nova.compute.manager [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1898.728047] env[69784]: DEBUG nova.network.neutron [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1898.736205] env[69784]: DEBUG nova.compute.manager [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1898.793763] env[69784]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=69784) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1898.794055] env[69784]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1898.794883] env[69784]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 1898.794883] env[69784]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1898.794883] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1898.794883] env[69784]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1898.794883] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1898.794883] env[69784]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1898.794883] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1898.794883] env[69784]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1898.794883] env[69784]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1898.794883] env[69784]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-717e5cfb-b9b2-4e8f-9aea-17d72b7df6ed'] [ 1898.794883] env[69784]: ERROR oslo.service.loopingcall [ 1898.794883] env[69784]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1898.794883] env[69784]: ERROR oslo.service.loopingcall [ 1898.794883] env[69784]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1898.794883] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1898.794883] env[69784]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1898.795516] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1898.795516] env[69784]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1898.795516] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 1898.795516] env[69784]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1898.795516] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1898.795516] env[69784]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1898.795516] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1898.795516] env[69784]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1898.795516] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1898.795516] env[69784]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1898.795516] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1898.795516] env[69784]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1898.795516] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1898.795516] env[69784]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1898.795516] env[69784]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1898.795516] env[69784]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1898.795516] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1898.795516] env[69784]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1898.796069] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1898.796069] env[69784]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1898.796069] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1898.796069] env[69784]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1898.796069] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1898.796069] env[69784]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1898.796069] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1898.796069] env[69784]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1898.796069] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1898.796069] env[69784]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1898.796069] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1898.796069] env[69784]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1898.796069] env[69784]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1898.796069] env[69784]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1898.796069] env[69784]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1898.796069] env[69784]: ERROR oslo.service.loopingcall [ 1898.796675] env[69784]: ERROR nova.compute.manager [None req-6d04f637-d369-47c2-85c4-e387b47ef2ec tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1898.802875] env[69784]: DEBUG nova.compute.manager [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Start spawning the instance on the hypervisor. 
{{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1898.830454] env[69784]: DEBUG nova.virt.hardware [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1898.830690] env[69784]: DEBUG nova.virt.hardware [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1898.830848] env[69784]: DEBUG nova.virt.hardware [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1898.831037] env[69784]: DEBUG nova.virt.hardware [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1898.831205] env[69784]: DEBUG nova.virt.hardware [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1898.831396] env[69784]: DEBUG nova.virt.hardware [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1898.831649] env[69784]: DEBUG nova.virt.hardware [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1898.831816] env[69784]: DEBUG nova.virt.hardware [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1898.832016] env[69784]: DEBUG nova.virt.hardware [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 
tempest-ServersTestJSON-1328085021-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1898.832228] env[69784]: DEBUG nova.virt.hardware [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1898.832428] env[69784]: DEBUG nova.virt.hardware [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1898.833951] env[69784]: ERROR nova.compute.manager [None req-6d04f637-d369-47c2-85c4-e387b47ef2ec tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1898.833951] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Traceback (most recent call last): [ 1898.833951] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1898.833951] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] ret = obj(*args, **kwargs) [ 1898.833951] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1898.833951] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] exception_handler_v20(status_code, error_body) [ 1898.833951] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1898.833951] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] raise client_exc(message=error_message, [ 1898.833951] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1898.833951] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Neutron server returns request_ids: ['req-717e5cfb-b9b2-4e8f-9aea-17d72b7df6ed'] [ 1898.833951] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] [ 1898.834341] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] During handling of the above exception, another exception occurred: [ 1898.834341] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] [ 1898.834341] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Traceback (most recent call last): [ 1898.834341] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File 
"/opt/stack/nova/nova/compute/manager.py", line 3315, in do_terminate_instance [ 1898.834341] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] self._delete_instance(context, instance, bdms) [ 1898.834341] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/compute/manager.py", line 3250, in _delete_instance [ 1898.834341] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] self._shutdown_instance(context, instance, bdms) [ 1898.834341] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/compute/manager.py", line 3144, in _shutdown_instance [ 1898.834341] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] self._try_deallocate_network(context, instance, requested_networks) [ 1898.834341] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/compute/manager.py", line 3058, in _try_deallocate_network [ 1898.834341] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] with excutils.save_and_reraise_exception(): [ 1898.834341] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1898.834341] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] self.force_reraise() [ 1898.834826] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1898.834826] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] raise self.value [ 1898.834826] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/compute/manager.py", line 3056, in _try_deallocate_network [ 1898.834826] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] _deallocate_network_with_retries() [ 1898.834826] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1898.834826] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] return evt.wait() [ 1898.834826] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1898.834826] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] result = hub.switch() [ 1898.834826] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1898.834826] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] return self.greenlet.switch() [ 1898.834826] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1898.834826] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] result = 
func(*self.args, **self.kw) [ 1898.835231] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1898.835231] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] result = f(*args, **kwargs) [ 1898.835231] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 1898.835231] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] self._deallocate_network( [ 1898.835231] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1898.835231] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] self.network_api.deallocate_for_instance( [ 1898.835231] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1898.835231] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] data = neutron.list_ports(**search_opts) [ 1898.835231] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1898.835231] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] ret = obj(*args, **kwargs) [ 1898.835231] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1898.835231] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] return self.list('ports', self.ports_path, retrieve_all, [ 1898.835231] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1898.835632] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] ret = obj(*args, **kwargs) [ 1898.835632] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1898.835632] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] for r in self._pagination(collection, path, **params): [ 1898.835632] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1898.835632] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] res = self.get(path, params=params) [ 1898.835632] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1898.835632] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] ret = obj(*args, **kwargs) [ 1898.835632] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 
356, in get [ 1898.835632] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] return self.retry_request("GET", action, body=body, [ 1898.835632] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1898.835632] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] ret = obj(*args, **kwargs) [ 1898.835632] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1898.835632] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] return self.do_request(method, action, body=body, [ 1898.836029] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1898.836029] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] ret = obj(*args, **kwargs) [ 1898.836029] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1898.836029] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] self._handle_fault_response(status_code, replybody, resp) [ 1898.836029] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1898.836029] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1898.836029] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1898.836029] env[69784]: ERROR nova.compute.manager [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] [ 1898.837317] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6576b92-c3ca-439e-b453-86f34dca94cf {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.843168] env[69784]: DEBUG nova.policy [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '863e5eb3480c4cf7ae5b3109ac113718', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8e91214f37d741d7935308a363541f5c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 1898.850981] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d173788-e956-47c9-99fd-37e693a15d90 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.871416] env[69784]: DEBUG oslo_concurrency.lockutils [None req-6d04f637-d369-47c2-85c4-e387b47ef2ec tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Lock "6cdbefb3-90e0-4ea5-b41f-a094723dbdbe" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.300s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1898.873379] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "6cdbefb3-90e0-4ea5-b41f-a094723dbdbe" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 20.984s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1898.873571] env[69784]: INFO nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] During sync_power_state the instance has a pending task (deleting). Skip.
[ 1898.874527] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "6cdbefb3-90e0-4ea5-b41f-a094723dbdbe" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1898.919457] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05c75443-a62c-4d3b-ba50-cc00d0dfd449 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.926888] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8bef009-aec9-4172-b0a8-852f972816c0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.931526] env[69784]: INFO nova.compute.manager [None req-6d04f637-d369-47c2-85c4-e387b47ef2ec tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] [instance: 6cdbefb3-90e0-4ea5-b41f-a094723dbdbe] Successfully reverted task state from None on failure for instance. [ 1898.935301] env[69784]: ERROR oslo_messaging.rpc.server [None req-6d04f637-d369-47c2-85c4-e387b47ef2ec tempest-ListImageFiltersTestJSON-2144466809 tempest-ListImageFiltersTestJSON-2144466809-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1898.935301] env[69784]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1898.935301] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1898.935301] env[69784]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1898.935301] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1898.935301] env[69784]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1898.935301] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1898.935301] env[69784]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1898.935301] env[69784]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1898.935301] env[69784]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-717e5cfb-b9b2-4e8f-9aea-17d72b7df6ed'] [ 1898.935301] env[69784]: ERROR oslo_messaging.rpc.server [ 1898.935301] env[69784]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1898.935301] env[69784]: ERROR oslo_messaging.rpc.server [ 1898.935301] env[69784]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1898.935301] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1898.935850] env[69784]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1898.935850] env[69784]: ERROR oslo_messaging.rpc.server File
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1898.935850] env[69784]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1898.935850] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1898.935850] env[69784]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1898.935850] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1898.935850] env[69784]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1898.935850] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1898.935850] env[69784]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1898.935850] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1898.935850] env[69784]: ERROR oslo_messaging.rpc.server raise self.value [ 1898.935850] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1898.935850] env[69784]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1898.935850] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1898.935850] env[69784]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1898.935850] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1898.935850] env[69784]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1898.935850] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1898.936388] env[69784]: ERROR oslo_messaging.rpc.server raise self.value [ 1898.936388] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1898.936388] env[69784]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1898.936388] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1439, in decorated_function [ 1898.936388] env[69784]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1898.936388] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1898.936388] env[69784]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1898.936388] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1898.936388] env[69784]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1898.936388] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1898.936388] env[69784]: ERROR oslo_messaging.rpc.server raise self.value [ 1898.936388] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1898.936388] env[69784]: ERROR 
oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1898.936388] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3327, in terminate_instance [ 1898.936388] env[69784]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1898.936388] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 414, in inner [ 1898.936388] env[69784]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1898.936388] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3322, in do_terminate_instance [ 1898.936927] env[69784]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1898.936927] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1898.936927] env[69784]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1898.936927] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1898.936927] env[69784]: ERROR oslo_messaging.rpc.server raise self.value [ 1898.936927] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3315, in do_terminate_instance [ 1898.936927] env[69784]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1898.936927] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3250, in _delete_instance [ 1898.936927] env[69784]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1898.936927] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3144, in _shutdown_instance [ 1898.936927] env[69784]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1898.936927] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3058, in _try_deallocate_network [ 1898.936927] env[69784]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1898.936927] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1898.936927] env[69784]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1898.936927] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1898.936927] env[69784]: ERROR oslo_messaging.rpc.server raise self.value [ 1898.936927] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3056, in _try_deallocate_network [ 1898.937471] env[69784]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1898.937471] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1898.937471] env[69784]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1898.937471] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1898.937471] env[69784]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1898.937471] env[69784]: ERROR 
oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1898.937471] env[69784]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1898.937471] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1898.937471] env[69784]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1898.937471] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1898.937471] env[69784]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1898.937471] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 1898.937471] env[69784]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1898.937471] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1898.937471] env[69784]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1898.937471] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1898.937471] env[69784]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1898.937471] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1898.938077] env[69784]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1898.938077] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1898.938077] env[69784]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1898.938077] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1898.938077] env[69784]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1898.938077] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1898.938077] env[69784]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1898.938077] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1898.938077] env[69784]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1898.938077] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1898.938077] env[69784]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1898.938077] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1898.938077] env[69784]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1898.938077] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1898.938077] env[69784]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1898.938077] env[69784]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1898.938077] env[69784]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1898.938077] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1898.938640] env[69784]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1898.938640] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1898.938640] env[69784]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1898.938640] env[69784]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1898.938640] env[69784]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1898.938640] env[69784]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1898.938640] env[69784]: ERROR oslo_messaging.rpc.server [ 1898.961365] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f2f298e-30fb-43ad-af7c-9069201334b4 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.968779] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-427da210-f366-4b92-a4ba-d0a3ea8b88cb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.982352] env[69784]: DEBUG nova.compute.provider_tree [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1898.990924] env[69784]: DEBUG nova.scheduler.client.report [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1899.006386] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.311s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1899.006661] env[69784]: DEBUG nova.compute.manager [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] 
Start building networks asynchronously for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1899.038376] env[69784]: DEBUG nova.compute.utils [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1899.039706] env[69784]: DEBUG nova.compute.manager [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1899.039873] env[69784]: DEBUG nova.network.neutron [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1899.048894] env[69784]: DEBUG nova.compute.manager [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1899.111196] env[69784]: DEBUG nova.compute.manager [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Start spawning the instance on the hypervisor. 
{{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1899.122915] env[69784]: DEBUG nova.policy [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c93b274686c34049be1b37ef70656616', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c0126dda00a44838ac749dee6f266970', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 1899.137962] env[69784]: DEBUG nova.virt.hardware [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1899.138196] env[69784]: DEBUG nova.virt.hardware [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1899.138370] env[69784]: DEBUG nova.virt.hardware [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1899.138573] env[69784]: DEBUG nova.virt.hardware [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1899.138702] env[69784]: DEBUG nova.virt.hardware [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1899.138845] env[69784]: DEBUG nova.virt.hardware [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1899.139061] env[69784]: DEBUG nova.virt.hardware [None 
req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1899.139224] env[69784]: DEBUG nova.virt.hardware [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1899.139387] env[69784]: DEBUG nova.virt.hardware [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1899.139548] env[69784]: DEBUG nova.virt.hardware [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1899.139727] env[69784]: DEBUG nova.virt.hardware [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1899.140810] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b851b689-b54b-4a6b-8fe3-4dbbc0764a72 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.150755] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9df49bd4-03e7-4fb9-b372-16f0ad1cf9b2 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.300353] env[69784]: DEBUG nova.network.neutron [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Successfully created port: 7b66ea43-6bb1-4cea-878f-c431f4e3f6b6 {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1899.766795] env[69784]: DEBUG nova.network.neutron [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Successfully created port: 8670a703-ea6a-42fd-85ef-6dd9dcc9f604 {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1900.215253] env[69784]: DEBUG nova.compute.manager [req-a4ae0db8-13ee-45a6-9568-2b5da2aa6f6b req-00ae3a85-ba9d-42af-96a5-d2b845bafc17 service nova] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Received event network-vif-plugged-7b66ea43-6bb1-4cea-878f-c431f4e3f6b6 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1900.215513] env[69784]: DEBUG oslo_concurrency.lockutils [req-a4ae0db8-13ee-45a6-9568-2b5da2aa6f6b req-00ae3a85-ba9d-42af-96a5-d2b845bafc17 service nova] Acquiring lock 
"4479a824-1f93-45d0-953f-57736580d86f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1900.215780] env[69784]: DEBUG oslo_concurrency.lockutils [req-a4ae0db8-13ee-45a6-9568-2b5da2aa6f6b req-00ae3a85-ba9d-42af-96a5-d2b845bafc17 service nova] Lock "4479a824-1f93-45d0-953f-57736580d86f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1900.215846] env[69784]: DEBUG oslo_concurrency.lockutils [req-a4ae0db8-13ee-45a6-9568-2b5da2aa6f6b req-00ae3a85-ba9d-42af-96a5-d2b845bafc17 service nova] Lock "4479a824-1f93-45d0-953f-57736580d86f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1900.216011] env[69784]: DEBUG nova.compute.manager [req-a4ae0db8-13ee-45a6-9568-2b5da2aa6f6b req-00ae3a85-ba9d-42af-96a5-d2b845bafc17 service nova] [instance: 4479a824-1f93-45d0-953f-57736580d86f] No waiting events found dispatching network-vif-plugged-7b66ea43-6bb1-4cea-878f-c431f4e3f6b6 {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1900.216293] env[69784]: WARNING nova.compute.manager [req-a4ae0db8-13ee-45a6-9568-2b5da2aa6f6b req-00ae3a85-ba9d-42af-96a5-d2b845bafc17 service nova] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Received unexpected event network-vif-plugged-7b66ea43-6bb1-4cea-878f-c431f4e3f6b6 for instance with vm_state building and task_state spawning. [ 1900.325378] env[69784]: DEBUG nova.network.neutron [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Successfully updated port: 7b66ea43-6bb1-4cea-878f-c431f4e3f6b6 {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1900.337087] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Acquiring lock "refresh_cache-4479a824-1f93-45d0-953f-57736580d86f" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1900.337248] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Acquired lock "refresh_cache-4479a824-1f93-45d0-953f-57736580d86f" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1900.337403] env[69784]: DEBUG nova.network.neutron [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1900.407298] env[69784]: DEBUG nova.network.neutron [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Instance cache missing network info. 
{{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1900.769949] env[69784]: DEBUG nova.network.neutron [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Updating instance_info_cache with network_info: [{"id": "7b66ea43-6bb1-4cea-878f-c431f4e3f6b6", "address": "fa:16:3e:cb:03:4c", "network": {"id": "81e149bf-ad1b-4970-8b37-685f69ec18f7", "bridge": "br-int", "label": "tempest-ServersTestJSON-918442431-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e91214f37d741d7935308a363541f5c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a92a4ffe-7939-4697-bf98-5b22e2c7feda", "external-id": "nsx-vlan-transportzone-732", "segmentation_id": 732, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b66ea43-6b", "ovs_interfaceid": "7b66ea43-6bb1-4cea-878f-c431f4e3f6b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1900.780968] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Releasing lock "refresh_cache-4479a824-1f93-45d0-953f-57736580d86f" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1900.781336] env[69784]: DEBUG nova.compute.manager [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Instance network_info: |[{"id": "7b66ea43-6bb1-4cea-878f-c431f4e3f6b6", "address": "fa:16:3e:cb:03:4c", "network": {"id": "81e149bf-ad1b-4970-8b37-685f69ec18f7", "bridge": "br-int", "label": "tempest-ServersTestJSON-918442431-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e91214f37d741d7935308a363541f5c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a92a4ffe-7939-4697-bf98-5b22e2c7feda", "external-id": "nsx-vlan-transportzone-732", "segmentation_id": 732, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b66ea43-6b", "ovs_interfaceid": "7b66ea43-6bb1-4cea-878f-c431f4e3f6b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1900.782162] env[69784]: 
DEBUG nova.network.neutron [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Successfully updated port: 8670a703-ea6a-42fd-85ef-6dd9dcc9f604 {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1900.784419] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cb:03:4c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a92a4ffe-7939-4697-bf98-5b22e2c7feda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7b66ea43-6bb1-4cea-878f-c431f4e3f6b6', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1900.793148] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Creating folder: Project (8e91214f37d741d7935308a363541f5c). Parent ref: group-v692547. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1900.794486] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquiring lock "refresh_cache-ec925fe0-8f7b-46c0-8d61-6a9cf989e798" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1900.794625] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquired lock "refresh_cache-ec925fe0-8f7b-46c0-8d61-6a9cf989e798" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1900.794768] env[69784]: DEBUG nova.network.neutron [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1900.796569] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3dc5d3a6-84bb-4f79-a732-b0cc0253b02b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.808448] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Created folder: Project (8e91214f37d741d7935308a363541f5c) in parent group-v692547. [ 1900.808631] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Creating folder: Instances. Parent ref: group-v692651. 
{{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1900.808862] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-87483326-dc93-4d57-9dd2-353300001ba4 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.818102] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Created folder: Instances in parent group-v692651. [ 1900.818293] env[69784]: DEBUG oslo.service.loopingcall [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1900.818478] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1900.818676] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-84a0b190-88ba-4736-88e6-586a856d1c84 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.837058] env[69784]: DEBUG nova.network.neutron [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Instance cache missing network info. {{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1900.840014] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1900.840014] env[69784]: value = "task-3467199" [ 1900.840014] env[69784]: _type = "Task" [ 1900.840014] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1900.847598] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467199, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.264775] env[69784]: DEBUG nova.network.neutron [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Updating instance_info_cache with network_info: [{"id": "8670a703-ea6a-42fd-85ef-6dd9dcc9f604", "address": "fa:16:3e:15:50:e4", "network": {"id": "089b8552-21a7-446a-93e8-08dfd5616726", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-692750947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0126dda00a44838ac749dee6f266970", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "75ff81f9-72b2-4e58-a8d8-5699907f7459", "external-id": "nsx-vlan-transportzone-978", "segmentation_id": 978, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8670a703-ea", "ovs_interfaceid": "8670a703-ea6a-42fd-85ef-6dd9dcc9f604", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1901.276568] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Releasing lock "refresh_cache-ec925fe0-8f7b-46c0-8d61-6a9cf989e798" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1901.276874] env[69784]: DEBUG nova.compute.manager [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Instance network_info: |[{"id": "8670a703-ea6a-42fd-85ef-6dd9dcc9f604", "address": "fa:16:3e:15:50:e4", "network": {"id": "089b8552-21a7-446a-93e8-08dfd5616726", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-692750947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0126dda00a44838ac749dee6f266970", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "75ff81f9-72b2-4e58-a8d8-5699907f7459", "external-id": "nsx-vlan-transportzone-978", "segmentation_id": 978, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8670a703-ea", "ovs_interfaceid": "8670a703-ea6a-42fd-85ef-6dd9dcc9f604", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1971}} [ 1901.277280] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:15:50:e4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '75ff81f9-72b2-4e58-a8d8-5699907f7459', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8670a703-ea6a-42fd-85ef-6dd9dcc9f604', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1901.284934] env[69784]: DEBUG oslo.service.loopingcall [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1901.285412] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1901.285635] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-62239704-a432-40bd-95a3-e787a9c55380 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.306051] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1901.306051] env[69784]: value = "task-3467200" [ 1901.306051] env[69784]: _type = "Task" [ 1901.306051] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.313774] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467200, 'name': CreateVM_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.348193] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467199, 'name': CreateVM_Task, 'duration_secs': 0.287624} completed successfully. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.348349] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1901.349039] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1901.349207] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1901.349525] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1901.349762] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-793e3b0f-05d4-4f25-b97f-2227e25449a8 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.353911] env[69784]: DEBUG oslo_vmware.api [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Waiting for the task: (returnval){ [ 1901.353911] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]524a1298-61a3-cda1-bfb4-d3a942c02d38" [ 1901.353911] env[69784]: _type = "Task" [ 1901.353911] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.363010] env[69784]: DEBUG oslo_vmware.api [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]524a1298-61a3-cda1-bfb4-d3a942c02d38, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.816595] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467200, 'name': CreateVM_Task, 'duration_secs': 0.265064} completed successfully. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.816776] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1901.817426] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1901.864065] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1901.864367] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1901.865513] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1901.865513] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1901.865513] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1901.865513] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9813d14e-b2c1-4436-9422-042132b99673 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.870418] env[69784]: DEBUG oslo_vmware.api [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Waiting for the task: (returnval){ [ 1901.870418] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52d354c9-cf95-fc0c-ae33-e4164a660a71" [ 1901.870418] env[69784]: _type = "Task" [ 1901.870418] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.878461] env[69784]: DEBUG oslo_vmware.api [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52d354c9-cf95-fc0c-ae33-e4164a660a71, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.246621] env[69784]: DEBUG nova.compute.manager [req-262a33ba-c2d9-41c2-9d1d-a8439dcc2938 req-fec09f74-1084-4900-a628-c7081a50eaff service nova] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Received event network-changed-7b66ea43-6bb1-4cea-878f-c431f4e3f6b6 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1902.246829] env[69784]: DEBUG nova.compute.manager [req-262a33ba-c2d9-41c2-9d1d-a8439dcc2938 req-fec09f74-1084-4900-a628-c7081a50eaff service nova] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Refreshing instance network info cache due to event network-changed-7b66ea43-6bb1-4cea-878f-c431f4e3f6b6. {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1902.247541] env[69784]: DEBUG oslo_concurrency.lockutils [req-262a33ba-c2d9-41c2-9d1d-a8439dcc2938 req-fec09f74-1084-4900-a628-c7081a50eaff service nova] Acquiring lock "refresh_cache-4479a824-1f93-45d0-953f-57736580d86f" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1902.247541] env[69784]: DEBUG oslo_concurrency.lockutils [req-262a33ba-c2d9-41c2-9d1d-a8439dcc2938 req-fec09f74-1084-4900-a628-c7081a50eaff service nova] Acquired lock "refresh_cache-4479a824-1f93-45d0-953f-57736580d86f" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1902.247541] env[69784]: DEBUG nova.network.neutron [req-262a33ba-c2d9-41c2-9d1d-a8439dcc2938 req-fec09f74-1084-4900-a628-c7081a50eaff service nova] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Refreshing network info cache for port 7b66ea43-6bb1-4cea-878f-c431f4e3f6b6 {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1902.383822] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1902.383822] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1902.383822] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1902.545695] 
env[69784]: DEBUG nova.network.neutron [req-262a33ba-c2d9-41c2-9d1d-a8439dcc2938 req-fec09f74-1084-4900-a628-c7081a50eaff service nova] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Updated VIF entry in instance network info cache for port 7b66ea43-6bb1-4cea-878f-c431f4e3f6b6. {{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1902.546077] env[69784]: DEBUG nova.network.neutron [req-262a33ba-c2d9-41c2-9d1d-a8439dcc2938 req-fec09f74-1084-4900-a628-c7081a50eaff service nova] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Updating instance_info_cache with network_info: [{"id": "7b66ea43-6bb1-4cea-878f-c431f4e3f6b6", "address": "fa:16:3e:cb:03:4c", "network": {"id": "81e149bf-ad1b-4970-8b37-685f69ec18f7", "bridge": "br-int", "label": "tempest-ServersTestJSON-918442431-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e91214f37d741d7935308a363541f5c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a92a4ffe-7939-4697-bf98-5b22e2c7feda", "external-id": "nsx-vlan-transportzone-732", "segmentation_id": 732, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b66ea43-6b", "ovs_interfaceid": "7b66ea43-6bb1-4cea-878f-c431f4e3f6b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1902.556341] env[69784]: DEBUG oslo_concurrency.lockutils [req-262a33ba-c2d9-41c2-9d1d-a8439dcc2938 req-fec09f74-1084-4900-a628-c7081a50eaff service nova] Releasing lock "refresh_cache-4479a824-1f93-45d0-953f-57736580d86f" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1902.556580] env[69784]: DEBUG nova.compute.manager [req-262a33ba-c2d9-41c2-9d1d-a8439dcc2938 req-fec09f74-1084-4900-a628-c7081a50eaff service nova] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Received event network-vif-plugged-8670a703-ea6a-42fd-85ef-6dd9dcc9f604 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1902.556776] env[69784]: DEBUG oslo_concurrency.lockutils [req-262a33ba-c2d9-41c2-9d1d-a8439dcc2938 req-fec09f74-1084-4900-a628-c7081a50eaff service nova] Acquiring lock "ec925fe0-8f7b-46c0-8d61-6a9cf989e798-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1902.556978] env[69784]: DEBUG oslo_concurrency.lockutils [req-262a33ba-c2d9-41c2-9d1d-a8439dcc2938 req-fec09f74-1084-4900-a628-c7081a50eaff service nova] Lock "ec925fe0-8f7b-46c0-8d61-6a9cf989e798-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1902.557158] env[69784]: DEBUG oslo_concurrency.lockutils [req-262a33ba-c2d9-41c2-9d1d-a8439dcc2938 req-fec09f74-1084-4900-a628-c7081a50eaff service nova] Lock 
"ec925fe0-8f7b-46c0-8d61-6a9cf989e798-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1902.557324] env[69784]: DEBUG nova.compute.manager [req-262a33ba-c2d9-41c2-9d1d-a8439dcc2938 req-fec09f74-1084-4900-a628-c7081a50eaff service nova] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] No waiting events found dispatching network-vif-plugged-8670a703-ea6a-42fd-85ef-6dd9dcc9f604 {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1902.557492] env[69784]: WARNING nova.compute.manager [req-262a33ba-c2d9-41c2-9d1d-a8439dcc2938 req-fec09f74-1084-4900-a628-c7081a50eaff service nova] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Received unexpected event network-vif-plugged-8670a703-ea6a-42fd-85ef-6dd9dcc9f604 for instance with vm_state building and task_state spawning. [ 1902.557653] env[69784]: DEBUG nova.compute.manager [req-262a33ba-c2d9-41c2-9d1d-a8439dcc2938 req-fec09f74-1084-4900-a628-c7081a50eaff service nova] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Received event network-changed-8670a703-ea6a-42fd-85ef-6dd9dcc9f604 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1902.557806] env[69784]: DEBUG nova.compute.manager [req-262a33ba-c2d9-41c2-9d1d-a8439dcc2938 req-fec09f74-1084-4900-a628-c7081a50eaff service nova] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Refreshing instance network info cache due to event network-changed-8670a703-ea6a-42fd-85ef-6dd9dcc9f604. {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1902.558197] env[69784]: DEBUG oslo_concurrency.lockutils [req-262a33ba-c2d9-41c2-9d1d-a8439dcc2938 req-fec09f74-1084-4900-a628-c7081a50eaff service nova] Acquiring lock "refresh_cache-ec925fe0-8f7b-46c0-8d61-6a9cf989e798" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1902.558384] env[69784]: DEBUG oslo_concurrency.lockutils [req-262a33ba-c2d9-41c2-9d1d-a8439dcc2938 req-fec09f74-1084-4900-a628-c7081a50eaff service nova] Acquired lock "refresh_cache-ec925fe0-8f7b-46c0-8d61-6a9cf989e798" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1902.558557] env[69784]: DEBUG nova.network.neutron [req-262a33ba-c2d9-41c2-9d1d-a8439dcc2938 req-fec09f74-1084-4900-a628-c7081a50eaff service nova] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Refreshing network info cache for port 8670a703-ea6a-42fd-85ef-6dd9dcc9f604 {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1902.786019] env[69784]: DEBUG nova.network.neutron [req-262a33ba-c2d9-41c2-9d1d-a8439dcc2938 req-fec09f74-1084-4900-a628-c7081a50eaff service nova] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Updated VIF entry in instance network info cache for port 8670a703-ea6a-42fd-85ef-6dd9dcc9f604. 
{{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1902.786455] env[69784]: DEBUG nova.network.neutron [req-262a33ba-c2d9-41c2-9d1d-a8439dcc2938 req-fec09f74-1084-4900-a628-c7081a50eaff service nova] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Updating instance_info_cache with network_info: [{"id": "8670a703-ea6a-42fd-85ef-6dd9dcc9f604", "address": "fa:16:3e:15:50:e4", "network": {"id": "089b8552-21a7-446a-93e8-08dfd5616726", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-692750947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0126dda00a44838ac749dee6f266970", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "75ff81f9-72b2-4e58-a8d8-5699907f7459", "external-id": "nsx-vlan-transportzone-978", "segmentation_id": 978, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8670a703-ea", "ovs_interfaceid": "8670a703-ea6a-42fd-85ef-6dd9dcc9f604", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1902.795475] env[69784]: DEBUG oslo_concurrency.lockutils [req-262a33ba-c2d9-41c2-9d1d-a8439dcc2938 req-fec09f74-1084-4900-a628-c7081a50eaff service nova] Releasing lock "refresh_cache-ec925fe0-8f7b-46c0-8d61-6a9cf989e798" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1931.234332] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5dddffaa-5a6b-4969-8685-0ff825ae6ace tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Acquiring lock "4479a824-1f93-45d0-953f-57736580d86f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1943.841654] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1943.842044] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69784) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1945.808363] env[69784]: WARNING oslo_vmware.rw_handles [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1945.808363] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1945.808363] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1945.808363] env[69784]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1945.808363] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1945.808363] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 1945.808363] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1945.808363] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1945.808363] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1945.808363] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1945.808363] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1945.808363] env[69784]: ERROR oslo_vmware.rw_handles [ 1945.808999] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/ae1aaa5e-1e49-4b20-a04f-4f406b9e90cc/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1945.810895] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1945.811153] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Copying Virtual Disk [datastore1] vmware_temp/ae1aaa5e-1e49-4b20-a04f-4f406b9e90cc/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] vmware_temp/ae1aaa5e-1e49-4b20-a04f-4f406b9e90cc/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1945.811439] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ff84ea36-c0a5-4f38-ae7a-ca67bdf7321c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.819562] env[69784]: DEBUG oslo_vmware.api [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Waiting for the 
task: (returnval){ [ 1945.819562] env[69784]: value = "task-3467201" [ 1945.819562] env[69784]: _type = "Task" [ 1945.819562] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1945.827625] env[69784]: DEBUG oslo_vmware.api [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Task: {'id': task-3467201, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.839871] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1946.330046] env[69784]: DEBUG oslo_vmware.exceptions [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Fault InvalidArgument not matched. {{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1946.330344] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1946.330923] env[69784]: ERROR nova.compute.manager [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1946.330923] env[69784]: Faults: ['InvalidArgument'] [ 1946.330923] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Traceback (most recent call last): [ 1946.330923] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1946.330923] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] yield resources [ 1946.330923] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1946.330923] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] self.driver.spawn(context, instance, image_meta, [ 1946.330923] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1946.330923] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1946.330923] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1946.330923] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] 
self._fetch_image_if_missing(context, vi) [ 1946.330923] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1946.331474] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] image_cache(vi, tmp_image_ds_loc) [ 1946.331474] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1946.331474] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] vm_util.copy_virtual_disk( [ 1946.331474] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1946.331474] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] session._wait_for_task(vmdk_copy_task) [ 1946.331474] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1946.331474] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] return self.wait_for_task(task_ref) [ 1946.331474] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1946.331474] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] return evt.wait() [ 1946.331474] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1946.331474] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] result = hub.switch() [ 1946.331474] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1946.331474] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] return self.greenlet.switch() [ 1946.331829] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1946.331829] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] self.f(*self.args, **self.kw) [ 1946.331829] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1946.331829] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] raise exceptions.translate_fault(task_info.error) [ 1946.331829] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1946.331829] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Faults: ['InvalidArgument'] [ 1946.331829] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] [ 1946.331829] env[69784]: INFO nova.compute.manager [None 
req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Terminating instance [ 1946.332797] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1946.333013] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1946.333253] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4f06a9cf-b7cd-48f9-9c70-1c748df26e3c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.335431] env[69784]: DEBUG nova.compute.manager [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1946.335630] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1946.336362] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15deb6e3-a667-414d-9e80-aa66eb042aaa {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.343326] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1946.343552] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f0110d9d-acc2-448a-a5b9-e26002bf2160 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.345693] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1946.345869] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1946.346808] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43e3fadf-6a98-4c50-92b5-d8f9d4ebc44b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.351211] env[69784]: DEBUG oslo_vmware.api [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Waiting for the task: (returnval){ [ 1946.351211] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52437e3b-7910-b1dc-56f7-2346d2ad49ba" [ 1946.351211] env[69784]: _type = "Task" [ 1946.351211] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.358450] env[69784]: DEBUG oslo_vmware.api [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52437e3b-7910-b1dc-56f7-2346d2ad49ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.407170] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1946.407380] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1946.407545] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Deleting the datastore file [datastore1] 93ea7e73-f280-4e22-9ac7-f1be9926a158 {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1946.407802] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fe54a4f1-9b10-4e54-ac92-741c0dc67786 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.413788] env[69784]: DEBUG oslo_vmware.api [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Waiting for the task: (returnval){ [ 1946.413788] env[69784]: value = "task-3467203" [ 1946.413788] env[69784]: _type = "Task" [ 1946.413788] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.421098] env[69784]: DEBUG oslo_vmware.api [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Task: {'id': task-3467203, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.840161] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1946.860897] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1946.861165] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Creating directory with path [datastore1] vmware_temp/913cf1da-d120-4c4d-82e6-69f6a17d0c22/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1946.861398] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d90ae01e-65b7-47d3-8cce-d815cc268903 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.873315] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Created directory with path [datastore1] vmware_temp/913cf1da-d120-4c4d-82e6-69f6a17d0c22/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1946.873523] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Fetch image to [datastore1] vmware_temp/913cf1da-d120-4c4d-82e6-69f6a17d0c22/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1946.873703] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/913cf1da-d120-4c4d-82e6-69f6a17d0c22/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1946.874483] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2286fb2-95e6-49af-ad1c-483bb1a2af33 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.881377] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f37af462-b46a-414d-8a82-4cc08a10cb9b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.890205] env[69784]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df69a14c-7816-4c86-b626-ce576333948a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.924619] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15f917cb-f83b-4eb3-a2dc-d923ffe6d5f0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.931551] env[69784]: DEBUG oslo_vmware.api [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Task: {'id': task-3467203, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074535} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1946.933048] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1946.933242] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1946.933416] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1946.933589] env[69784]: INFO nova.compute.manager [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Took 0.60 seconds to destroy the instance on the hypervisor. 
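Editor's note: the records above follow the usual oslo.vmware pattern seen throughout this log. A vSphere call (CopyVirtualDisk_Task, DeleteDatastoreFile_Task, SearchDatastore_Task) returns a Task reference, and wait_for_task polls it until it reports "completed successfully" or raises a translated fault, as in the InvalidArgument traceback earlier. The snippet below is a minimal, illustrative sketch of that polling loop only; get_task_info, TaskFailed and the state strings are hypothetical stand-ins, not the actual oslo.vmware API.

    import time

    class TaskFailed(Exception):
        """Stand-in for a translated VIM fault (e.g. InvalidArgument)."""

    def wait_for_task(get_task_info, task_id, poll_interval=0.5):
        # Poll until the task reaches a terminal state, mirroring the
        # "progress is 0%" ... "completed successfully" records above.
        while True:
            info = get_task_info(task_id)   # e.g. {'state': 'running', 'progress': 0}
            if info['state'] == 'success':
                return info                 # caller can log duration_secs etc.
            if info['state'] == 'error':
                # analogous to: raise exceptions.translate_fault(task_info.error)
                raise TaskFailed(info.get('error', 'unknown fault'))
            time.sleep(poll_interval)       # Nova drives this from a green-thread looping call

In Nova the loop runs on a green thread, which is why the traceback above only surfaces the fault after evt.wait() and hub.switch() return.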
[ 1946.935348] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-179fa899-3826-4a1a-9917-f55325ad4aeb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.937246] env[69784]: DEBUG nova.compute.claims [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1946.937421] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1946.937632] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1946.960545] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1947.010375] env[69784]: DEBUG oslo_vmware.rw_handles [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/913cf1da-d120-4c4d-82e6-69f6a17d0c22/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1947.071328] env[69784]: DEBUG oslo_vmware.rw_handles [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Completed reading data from the image iterator. {{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1947.071514] env[69784]: DEBUG oslo_vmware.rw_handles [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/913cf1da-d120-4c4d-82e6-69f6a17d0c22/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1947.165309] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5885c3c5-4bbe-49fc-a15e-7950401bfe79 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.172777] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04295f7f-cfc7-40d9-adfb-b9e50684b14f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.202083] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78ba156e-d2cf-4bc8-b73c-05c8eab69ac9 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.208793] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62854cd7-b62f-4e9e-a086-0ead3fc7487c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.221403] env[69784]: DEBUG nova.compute.provider_tree [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1947.229672] env[69784]: DEBUG nova.scheduler.client.report [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1947.242894] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.305s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1947.243457] env[69784]: ERROR nova.compute.manager [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1947.243457] env[69784]: Faults: ['InvalidArgument'] [ 1947.243457] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Traceback (most recent call last): [ 1947.243457] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1947.243457] env[69784]: 
ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] self.driver.spawn(context, instance, image_meta, [ 1947.243457] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1947.243457] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1947.243457] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1947.243457] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] self._fetch_image_if_missing(context, vi) [ 1947.243457] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1947.243457] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] image_cache(vi, tmp_image_ds_loc) [ 1947.243457] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1947.243890] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] vm_util.copy_virtual_disk( [ 1947.243890] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1947.243890] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] session._wait_for_task(vmdk_copy_task) [ 1947.243890] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1947.243890] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] return self.wait_for_task(task_ref) [ 1947.243890] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1947.243890] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] return evt.wait() [ 1947.243890] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1947.243890] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] result = hub.switch() [ 1947.243890] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1947.243890] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] return self.greenlet.switch() [ 1947.243890] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1947.243890] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] self.f(*self.args, **self.kw) [ 1947.244408] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1947.244408] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] raise exceptions.translate_fault(task_info.error) [ 1947.244408] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1947.244408] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Faults: ['InvalidArgument'] [ 1947.244408] env[69784]: ERROR nova.compute.manager [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] [ 1947.244408] env[69784]: DEBUG nova.compute.utils [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1947.245571] env[69784]: DEBUG nova.compute.manager [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Build of instance 93ea7e73-f280-4e22-9ac7-f1be9926a158 was re-scheduled: A specified parameter was not correct: fileType [ 1947.245571] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1947.245953] env[69784]: DEBUG nova.compute.manager [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1947.246141] env[69784]: DEBUG nova.compute.manager [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1947.246308] env[69784]: DEBUG nova.compute.manager [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1947.246472] env[69784]: DEBUG nova.network.neutron [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1947.668024] env[69784]: DEBUG nova.network.neutron [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1947.680109] env[69784]: INFO nova.compute.manager [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Took 0.43 seconds to deallocate network for instance. [ 1947.775686] env[69784]: INFO nova.scheduler.client.report [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Deleted allocations for instance 93ea7e73-f280-4e22-9ac7-f1be9926a158 [ 1947.795946] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4a7f3afd-bebc-4db8-a12b-57fd7305c56f tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Lock "93ea7e73-f280-4e22-9ac7-f1be9926a158" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 624.163s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1947.796295] env[69784]: DEBUG oslo_concurrency.lockutils [None req-db4f857f-bea7-4ead-a510-fb6a35428380 tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Lock "93ea7e73-f280-4e22-9ac7-f1be9926a158" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 427.760s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1947.796510] env[69784]: DEBUG oslo_concurrency.lockutils [None req-db4f857f-bea7-4ead-a510-fb6a35428380 tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Acquiring lock "93ea7e73-f280-4e22-9ac7-f1be9926a158-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1947.796719] env[69784]: DEBUG oslo_concurrency.lockutils [None req-db4f857f-bea7-4ead-a510-fb6a35428380 tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Lock "93ea7e73-f280-4e22-9ac7-f1be9926a158-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1947.796953] env[69784]: DEBUG oslo_concurrency.lockutils [None req-db4f857f-bea7-4ead-a510-fb6a35428380 tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Lock "93ea7e73-f280-4e22-9ac7-f1be9926a158-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1947.800769] env[69784]: INFO nova.compute.manager [None req-db4f857f-bea7-4ead-a510-fb6a35428380 tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Terminating instance [ 1947.802714] env[69784]: DEBUG nova.compute.manager [None req-db4f857f-bea7-4ead-a510-fb6a35428380 tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1947.802907] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-db4f857f-bea7-4ead-a510-fb6a35428380 tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1947.803249] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e4809e39-b459-4892-b1d0-8a46cedb775b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.809691] env[69784]: DEBUG nova.compute.manager [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1947.816421] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5142ee20-0afd-4ffb-b74d-5957a59343cc {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.846261] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1947.846698] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-db4f857f-bea7-4ead-a510-fb6a35428380 tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 93ea7e73-f280-4e22-9ac7-f1be9926a158 could not be found. 
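Editor's note: the paired "Acquiring lock ... by ...", "acquired ... waited" and ""released" ... held" records here and elsewhere come from oslo.concurrency's lockutils, which Nova uses to serialise work per instance UUID (do_terminate_instance, the "-events" lock) and per host resource tracker ("compute_resources"). A minimal sketch of that pattern, assuming only the oslo.concurrency lockutils decorator and context manager; the function names and bodies are illustrative, not Nova's:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_or_abort(instance):
        # Serialised with every other claim/abort on this host, which is why the
        # log reports how long each caller waited for and held "compute_resources".
        ...

    def do_terminate_instance(instance_uuid):
        # Per-instance serialisation: the lock name is the instance UUID,
        # matching records such as Acquiring lock "93ea7e73-f280-...".
        with lockutils.lock(instance_uuid):
            ...

The waited/held durations in the log (for example the 427.760s wait before this terminate could proceed) are exactly the time spent blocked on, and then inside, these critical sections.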
[ 1947.846908] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-db4f857f-bea7-4ead-a510-fb6a35428380 tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1947.847104] env[69784]: INFO nova.compute.manager [None req-db4f857f-bea7-4ead-a510-fb6a35428380 tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1947.847371] env[69784]: DEBUG oslo.service.loopingcall [None req-db4f857f-bea7-4ead-a510-fb6a35428380 tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1947.847725] env[69784]: DEBUG nova.compute.manager [-] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1947.847827] env[69784]: DEBUG nova.network.neutron [-] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1947.875405] env[69784]: DEBUG nova.network.neutron [-] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1947.881667] env[69784]: DEBUG oslo_concurrency.lockutils [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1947.881931] env[69784]: DEBUG oslo_concurrency.lockutils [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1947.884299] env[69784]: INFO nova.compute.claims [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1947.887292] env[69784]: INFO nova.compute.manager [-] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] Took 0.04 seconds to deallocate network for instance. 
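Editor's note: the "Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3" report lines around these claims carry the placement inventory this compute node exposes. Usable capacity per resource class is (total - reserved) * allocation_ratio, with max_unit and step_size bounding any single allocation. A quick worked check against the values in the log (illustrative arithmetic only):

    # Values copied from the scheduler report lines in this log.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0

So the m1.nano claim below (1 vCPU, 128 MB RAM, 1 GB root disk) fits comfortably, and the report client leaves the provider inventory untouched.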
[ 1947.984321] env[69784]: DEBUG oslo_concurrency.lockutils [None req-db4f857f-bea7-4ead-a510-fb6a35428380 tempest-ServerAddressesTestJSON-773973040 tempest-ServerAddressesTestJSON-773973040-project-member] Lock "93ea7e73-f280-4e22-9ac7-f1be9926a158" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.188s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1947.985859] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "93ea7e73-f280-4e22-9ac7-f1be9926a158" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 70.097s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1947.986806] env[69784]: INFO nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 93ea7e73-f280-4e22-9ac7-f1be9926a158] During sync_power_state the instance has a pending task (deleting). Skip. [ 1947.986806] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "93ea7e73-f280-4e22-9ac7-f1be9926a158" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1948.068795] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5a1ceac5-f8f3-4179-944a-0a06683a88a0 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquiring lock "ec925fe0-8f7b-46c0-8d61-6a9cf989e798" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1948.084679] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c88ec06-9617-4860-8043-c931c5138d54 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.094155] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0666f09f-074d-4de6-a76e-ce8af95c7348 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.123615] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18824d30-e4d7-4175-9e5c-d6e59fb5a96d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.130730] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ebbdac8-b587-463a-a820-ca797dd673b4 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.143499] env[69784]: DEBUG nova.compute.provider_tree [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1948.152416] env[69784]: DEBUG nova.scheduler.client.report [None req-825654d8-d43e-4fba-82b1-2a8787819e13 
tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1948.165570] env[69784]: DEBUG oslo_concurrency.lockutils [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.284s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1948.166033] env[69784]: DEBUG nova.compute.manager [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Start building networks asynchronously for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1948.199393] env[69784]: DEBUG nova.compute.utils [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1948.200640] env[69784]: DEBUG nova.compute.manager [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1948.200831] env[69784]: DEBUG nova.network.neutron [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1948.209030] env[69784]: DEBUG nova.compute.manager [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Start building block device mappings for instance. 
{{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1948.258057] env[69784]: DEBUG nova.policy [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f6552a441b39442db22371e84b909061', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a606bffcb6eb43c8a554793617386555', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 1948.271165] env[69784]: DEBUG nova.compute.manager [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Start spawning the instance on the hypervisor. {{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1948.295271] env[69784]: DEBUG nova.virt.hardware [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1948.295526] env[69784]: DEBUG nova.virt.hardware [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1948.295719] env[69784]: DEBUG nova.virt.hardware [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1948.295904] env[69784]: DEBUG nova.virt.hardware [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1948.296136] env[69784]: DEBUG nova.virt.hardware [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1948.296296] env[69784]: DEBUG nova.virt.hardware [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1948.296504] env[69784]: DEBUG nova.virt.hardware [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1948.296664] env[69784]: DEBUG nova.virt.hardware [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1948.296830] env[69784]: DEBUG nova.virt.hardware [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1948.296994] env[69784]: DEBUG nova.virt.hardware [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1948.297183] env[69784]: DEBUG nova.virt.hardware [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1948.298017] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4090896-d3ac-4cf9-8144-dc6dd4e0882e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.305947] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4764ada4-3c63-452e-a132-c14bdfe99976 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.593293] env[69784]: DEBUG nova.network.neutron [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Successfully created port: 1546b40b-7582-47e6-9640-01be826e30b9 {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1948.839191] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1949.304027] env[69784]: DEBUG nova.network.neutron [None 
req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Successfully updated port: 1546b40b-7582-47e6-9640-01be826e30b9 {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1949.315658] env[69784]: DEBUG oslo_concurrency.lockutils [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Acquiring lock "refresh_cache-c78438d5-ddaa-4858-a161-af83e6c16e54" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1949.315864] env[69784]: DEBUG oslo_concurrency.lockutils [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Acquired lock "refresh_cache-c78438d5-ddaa-4858-a161-af83e6c16e54" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1949.315959] env[69784]: DEBUG nova.network.neutron [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1949.356716] env[69784]: DEBUG nova.network.neutron [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Instance cache missing network info. 
{{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1949.505015] env[69784]: DEBUG nova.network.neutron [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Updating instance_info_cache with network_info: [{"id": "1546b40b-7582-47e6-9640-01be826e30b9", "address": "fa:16:3e:95:a3:bc", "network": {"id": "20eb192d-4429-4b17-89d3-153745821175", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1252408980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a606bffcb6eb43c8a554793617386555", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1546b40b-75", "ovs_interfaceid": "1546b40b-7582-47e6-9640-01be826e30b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1949.517745] env[69784]: DEBUG oslo_concurrency.lockutils [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Releasing lock "refresh_cache-c78438d5-ddaa-4858-a161-af83e6c16e54" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1949.518030] env[69784]: DEBUG nova.compute.manager [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Instance network_info: |[{"id": "1546b40b-7582-47e6-9640-01be826e30b9", "address": "fa:16:3e:95:a3:bc", "network": {"id": "20eb192d-4429-4b17-89d3-153745821175", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1252408980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a606bffcb6eb43c8a554793617386555", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1546b40b-75", "ovs_interfaceid": "1546b40b-7582-47e6-9640-01be826e30b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1949.518553] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:95:a3:bc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a10c88d7-d13f-44fd-acee-7a734eb5f56a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1546b40b-7582-47e6-9640-01be826e30b9', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1949.526048] env[69784]: DEBUG oslo.service.loopingcall [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1949.526474] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1949.526694] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a3cd84d5-de65-4a1b-892d-944bd2b521de {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.546700] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1949.546700] env[69784]: value = "task-3467204" [ 1949.546700] env[69784]: _type = "Task" [ 1949.546700] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1949.554235] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467204, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.709409] env[69784]: DEBUG nova.compute.manager [req-984f3355-177f-4030-8917-b16021b909e4 req-76bf2a94-be44-4a74-8043-f3c94623c0e9 service nova] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Received event network-vif-plugged-1546b40b-7582-47e6-9640-01be826e30b9 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1949.709518] env[69784]: DEBUG oslo_concurrency.lockutils [req-984f3355-177f-4030-8917-b16021b909e4 req-76bf2a94-be44-4a74-8043-f3c94623c0e9 service nova] Acquiring lock "c78438d5-ddaa-4858-a161-af83e6c16e54-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1949.709736] env[69784]: DEBUG oslo_concurrency.lockutils [req-984f3355-177f-4030-8917-b16021b909e4 req-76bf2a94-be44-4a74-8043-f3c94623c0e9 service nova] Lock "c78438d5-ddaa-4858-a161-af83e6c16e54-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1949.709929] env[69784]: DEBUG oslo_concurrency.lockutils [req-984f3355-177f-4030-8917-b16021b909e4 req-76bf2a94-be44-4a74-8043-f3c94623c0e9 service nova] Lock "c78438d5-ddaa-4858-a161-af83e6c16e54-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1949.710129] env[69784]: DEBUG nova.compute.manager [req-984f3355-177f-4030-8917-b16021b909e4 req-76bf2a94-be44-4a74-8043-f3c94623c0e9 service nova] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] No waiting events found dispatching network-vif-plugged-1546b40b-7582-47e6-9640-01be826e30b9 {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1949.710315] env[69784]: WARNING nova.compute.manager [req-984f3355-177f-4030-8917-b16021b909e4 req-76bf2a94-be44-4a74-8043-f3c94623c0e9 service nova] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Received unexpected event network-vif-plugged-1546b40b-7582-47e6-9640-01be826e30b9 for instance with vm_state building and task_state spawning. [ 1949.710482] env[69784]: DEBUG nova.compute.manager [req-984f3355-177f-4030-8917-b16021b909e4 req-76bf2a94-be44-4a74-8043-f3c94623c0e9 service nova] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Received event network-changed-1546b40b-7582-47e6-9640-01be826e30b9 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1949.710679] env[69784]: DEBUG nova.compute.manager [req-984f3355-177f-4030-8917-b16021b909e4 req-76bf2a94-be44-4a74-8043-f3c94623c0e9 service nova] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Refreshing instance network info cache due to event network-changed-1546b40b-7582-47e6-9640-01be826e30b9. 
{{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1949.710856] env[69784]: DEBUG oslo_concurrency.lockutils [req-984f3355-177f-4030-8917-b16021b909e4 req-76bf2a94-be44-4a74-8043-f3c94623c0e9 service nova] Acquiring lock "refresh_cache-c78438d5-ddaa-4858-a161-af83e6c16e54" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1949.710997] env[69784]: DEBUG oslo_concurrency.lockutils [req-984f3355-177f-4030-8917-b16021b909e4 req-76bf2a94-be44-4a74-8043-f3c94623c0e9 service nova] Acquired lock "refresh_cache-c78438d5-ddaa-4858-a161-af83e6c16e54" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1949.711165] env[69784]: DEBUG nova.network.neutron [req-984f3355-177f-4030-8917-b16021b909e4 req-76bf2a94-be44-4a74-8043-f3c94623c0e9 service nova] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Refreshing network info cache for port 1546b40b-7582-47e6-9640-01be826e30b9 {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1949.839812] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1949.851317] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1949.851521] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1949.851690] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1949.851866] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69784) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1949.852963] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad21cef5-e836-4060-8d6f-9150226d9492 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.861166] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-775611a0-2e66-433b-90ac-fe6a288d32ee {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.874897] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df2d0617-3417-4b41-9195-e853e68fcdcf {{(pid=69784) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.880879] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50d72523-939d-49c3-9ea4-2f0a04e6635f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.913073] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180888MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=69784) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1949.913217] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1949.913410] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1949.988841] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance db704361-31ad-49a0-8aa7-01d4e3f42a3d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1949.989009] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance a34a0620-ea85-4bd5-9690-c93d70ecb9ec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1949.989149] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1949.989341] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1949.989475] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 7632e563-1790-442f-9e13-77f3d93e4223 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1949.989590] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 871e3b73-d4d8-4081-8b92-0dee212d8961 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1949.989704] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ae4e1119-10e5-42fe-bb57-6bcb2c54d90b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1949.989912] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 4479a824-1f93-45d0-953f-57736580d86f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1949.990113] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ec925fe0-8f7b-46c0-8d61-6a9cf989e798 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1949.990234] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c78438d5-ddaa-4858-a161-af83e6c16e54 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1949.999395] env[69784]: DEBUG nova.network.neutron [req-984f3355-177f-4030-8917-b16021b909e4 req-76bf2a94-be44-4a74-8043-f3c94623c0e9 service nova] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Updated VIF entry in instance network info cache for port 1546b40b-7582-47e6-9640-01be826e30b9. 
{{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1949.999743] env[69784]: DEBUG nova.network.neutron [req-984f3355-177f-4030-8917-b16021b909e4 req-76bf2a94-be44-4a74-8043-f3c94623c0e9 service nova] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Updating instance_info_cache with network_info: [{"id": "1546b40b-7582-47e6-9640-01be826e30b9", "address": "fa:16:3e:95:a3:bc", "network": {"id": "20eb192d-4429-4b17-89d3-153745821175", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1252408980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a606bffcb6eb43c8a554793617386555", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1546b40b-75", "ovs_interfaceid": "1546b40b-7582-47e6-9640-01be826e30b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1950.001964] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 0a177681-5f4e-4dc5-baee-1303be38444a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1950.002210] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1950.002359] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1950.009657] env[69784]: DEBUG oslo_concurrency.lockutils [req-984f3355-177f-4030-8917-b16021b909e4 req-76bf2a94-be44-4a74-8043-f3c94623c0e9 service nova] Releasing lock "refresh_cache-c78438d5-ddaa-4858-a161-af83e6c16e54" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1950.058585] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467204, 'name': CreateVM_Task, 'duration_secs': 0.280011} completed successfully. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1950.058762] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1950.065607] env[69784]: DEBUG oslo_concurrency.lockutils [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1950.065777] env[69784]: DEBUG oslo_concurrency.lockutils [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1950.066110] env[69784]: DEBUG oslo_concurrency.lockutils [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1950.066357] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53e10a83-a2ea-4918-b68c-4e8f74e07c69 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.071324] env[69784]: DEBUG oslo_vmware.api [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Waiting for the task: (returnval){ [ 1950.071324] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]523b0f00-f129-635f-5c6a-64e0ab44c74c" [ 1950.071324] env[69784]: _type = "Task" [ 1950.071324] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1950.082213] env[69784]: DEBUG oslo_vmware.api [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]523b0f00-f129-635f-5c6a-64e0ab44c74c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.137429] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de6858da-9ec2-40b8-a271-18ce1b4c6984 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.144575] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c71a320-1c09-4e63-8e3f-f3a5b580593d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.173498] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1164bf75-f5f8-42cf-a14e-f592d9a723fa {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.182821] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0f38282-4dfa-4aa4-ac93-f3b96d02c8a7 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.198495] env[69784]: DEBUG nova.compute.provider_tree [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1950.207741] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1950.220771] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69784) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1950.220960] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.308s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1950.584639] env[69784]: DEBUG oslo_concurrency.lockutils [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1950.584639] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) 
_fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1950.584639] env[69784]: DEBUG oslo_concurrency.lockutils [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1953.221614] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1953.222045] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Starting heal instance info cache {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1953.222045] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Rebuilding the list of instances to heal {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1953.241424] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1953.243063] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1953.243063] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1953.243063] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1953.243063] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1953.243063] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1953.243294] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Skipping network cache update for instance because it is Building. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1953.243294] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1953.243294] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1953.243294] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1953.243294] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Didn't find any instances for network info cache update. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1954.856309] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1957.834464] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1957.856913] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1996.630218] env[69784]: WARNING oslo_vmware.rw_handles [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1996.630218] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1996.630218] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1996.630218] env[69784]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1996.630218] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1996.630218] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 1996.630218] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1996.630218] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1996.630218] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1996.630218] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1996.630218] env[69784]: 
ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1996.630218] env[69784]: ERROR oslo_vmware.rw_handles [ 1996.631106] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/913cf1da-d120-4c4d-82e6-69f6a17d0c22/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1996.633392] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1996.633762] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Copying Virtual Disk [datastore1] vmware_temp/913cf1da-d120-4c4d-82e6-69f6a17d0c22/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] vmware_temp/913cf1da-d120-4c4d-82e6-69f6a17d0c22/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1996.634229] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b4847073-a383-4505-994a-c442765669bc {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.644680] env[69784]: DEBUG oslo_vmware.api [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Waiting for the task: (returnval){ [ 1996.644680] env[69784]: value = "task-3467205" [ 1996.644680] env[69784]: _type = "Task" [ 1996.644680] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1996.654162] env[69784]: DEBUG oslo_vmware.api [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Task: {'id': task-3467205, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.155363] env[69784]: DEBUG oslo_vmware.exceptions [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Fault InvalidArgument not matched. 
{{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1997.155679] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1997.156275] env[69784]: ERROR nova.compute.manager [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1997.156275] env[69784]: Faults: ['InvalidArgument'] [ 1997.156275] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Traceback (most recent call last): [ 1997.156275] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1997.156275] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] yield resources [ 1997.156275] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1997.156275] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] self.driver.spawn(context, instance, image_meta, [ 1997.156275] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1997.156275] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1997.156275] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1997.156275] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] self._fetch_image_if_missing(context, vi) [ 1997.156275] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1997.156685] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] image_cache(vi, tmp_image_ds_loc) [ 1997.156685] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1997.156685] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] vm_util.copy_virtual_disk( [ 1997.156685] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1997.156685] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] session._wait_for_task(vmdk_copy_task) [ 1997.156685] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1997.156685] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] return self.wait_for_task(task_ref) [ 1997.156685] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1997.156685] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] return evt.wait() [ 1997.156685] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1997.156685] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] result = hub.switch() [ 1997.156685] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1997.156685] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] return self.greenlet.switch() [ 1997.157175] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1997.157175] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] self.f(*self.args, **self.kw) [ 1997.157175] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1997.157175] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] raise exceptions.translate_fault(task_info.error) [ 1997.157175] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1997.157175] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Faults: ['InvalidArgument'] [ 1997.157175] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] [ 1997.157175] env[69784]: INFO nova.compute.manager [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Terminating instance [ 1997.158300] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1997.158547] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1997.158758] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-679ba79f-d2b2-4020-980a-7d6f19463f89 {{(pid=69784) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.161161] env[69784]: DEBUG nova.compute.manager [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1997.161370] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1997.162128] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ad1efc9-5a97-4d39-b1b7-5aee6372eb05 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.169237] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1997.169466] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4adc44eb-c343-496c-91f3-c14313aade5e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.171739] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1997.171911] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1997.172949] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d70c36f-2349-4b0e-8c01-9699a6190f09 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.177603] env[69784]: DEBUG oslo_vmware.api [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Waiting for the task: (returnval){ [ 1997.177603] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52335b34-64a6-72c1-8858-099872d4e02d" [ 1997.177603] env[69784]: _type = "Task" [ 1997.177603] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1997.190924] env[69784]: DEBUG oslo_vmware.api [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52335b34-64a6-72c1-8858-099872d4e02d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.239143] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1997.239401] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1997.239590] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Deleting the datastore file [datastore1] db704361-31ad-49a0-8aa7-01d4e3f42a3d {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1997.239864] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9a73dd85-b904-4501-9e18-1971f7783d39 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.246180] env[69784]: DEBUG oslo_vmware.api [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Waiting for the task: (returnval){ [ 1997.246180] env[69784]: value = "task-3467207" [ 1997.246180] env[69784]: _type = "Task" [ 1997.246180] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1997.253897] env[69784]: DEBUG oslo_vmware.api [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Task: {'id': task-3467207, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.687815] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1997.688205] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Creating directory with path [datastore1] vmware_temp/a7d0e1e8-7f5b-431b-ba41-2bdc2350cd6b/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1997.688331] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7a71af95-a922-47b7-9f38-87696d5932a8 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.701357] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Created directory with path [datastore1] vmware_temp/a7d0e1e8-7f5b-431b-ba41-2bdc2350cd6b/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1997.701530] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Fetch image to [datastore1] vmware_temp/a7d0e1e8-7f5b-431b-ba41-2bdc2350cd6b/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1997.701695] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/a7d0e1e8-7f5b-431b-ba41-2bdc2350cd6b/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1997.702434] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03c54374-5992-4d5a-82e7-9b7260bf1038 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.708652] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-541f51f9-5c49-4a90-88ae-4d85383273e3 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.717435] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34399daf-3183-4a86-8084-1e9e8e7e79c5 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.750270] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af64f51-480a-42d5-a5f8-493cfde026d9 {{(pid=69784) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.757196] env[69784]: DEBUG oslo_vmware.api [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Task: {'id': task-3467207, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081918} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1997.758612] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1997.758801] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1997.758974] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1997.759163] env[69784]: INFO nova.compute.manager [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Took 0.60 seconds to destroy the instance on the hypervisor. 
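[editor's note] The failure recorded above surfaces from the task-polling path shown in the earlier traceback: CopyVirtualDisk_Task finishes in an error state, _poll_task translates the task's fault into a VimFaultException ("A specified parameter was not correct: fileType", Faults: ['InvalidArgument']), and that exception propagates up through vm_util.copy_virtual_disk into the compute manager, which then destroys the half-built instance. The sketch below is only an illustrative reconstruction of that poll-and-translate pattern under stated assumptions; the names TaskInfo, VimFaultException and poll_task_until_done are hypothetical stand-ins, not the real oslo.vmware API.

    # Illustrative sketch of the poll-and-translate pattern visible in the
    # traceback above (oslo_vmware.api._poll_task -> translate_fault).
    # All names here are hypothetical stand-ins, not the oslo.vmware code.
    import time
    from dataclasses import dataclass, field

    class VimFaultException(Exception):
        """Stand-in for the exception raised when a vCenter task reports a fault."""
        def __init__(self, fault_list, msg):
            super().__init__(msg)
            self.fault_list = fault_list

    @dataclass
    class TaskInfo:
        state: str                      # 'running', 'success' or 'error'
        error_msg: str = ""
        faults: list = field(default_factory=list)

    def poll_task_until_done(fetch_task_info, interval=0.5):
        """Poll a task until it succeeds; raise if the task reports an error.

        fetch_task_info: callable returning the current TaskInfo for the task.
        """
        while True:
            info = fetch_task_info()
            if info.state == "success":
                return info
            if info.state == "error":
                # Mirrors "raise exceptions.translate_fault(task_info.error)"
                raise VimFaultException(info.faults, info.error_msg)
            time.sleep(interval)

    if __name__ == "__main__":
        # Simulate the CopyVirtualDisk_Task failure from the log: the task ends
        # in an error state with fault 'InvalidArgument' about fileType.
        states = iter([
            TaskInfo(state="running"),
            TaskInfo(state="error",
                     error_msg="A specified parameter was not correct: fileType",
                     faults=["InvalidArgument"]),
        ])
        try:
            poll_task_until_done(lambda: next(states), interval=0.01)
        except VimFaultException as exc:
            print("Task failed:", exc, exc.fault_list)

[end editor's note]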
[ 1997.760865] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c2f51542-2dcb-46e2-a650-b6a0a55f635f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.762774] env[69784]: DEBUG nova.compute.claims [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1997.762947] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1997.763171] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1997.785497] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1997.844113] env[69784]: DEBUG oslo_vmware.rw_handles [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a7d0e1e8-7f5b-431b-ba41-2bdc2350cd6b/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1997.904118] env[69784]: DEBUG oslo_vmware.rw_handles [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Completed reading data from the image iterator. {{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1997.904472] env[69784]: DEBUG oslo_vmware.rw_handles [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a7d0e1e8-7f5b-431b-ba41-2bdc2350cd6b/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1998.002253] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6093225-a8fa-45cc-bb3d-40fe20c4b0c4 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.009824] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1683e8fe-3603-4731-b25b-cb4cd39d8f03 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.039751] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fb49e3c-562a-4da5-b289-ec6ab6848754 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.046325] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9108d2cf-4fad-4c23-88c2-ea4c569b147a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.058881] env[69784]: DEBUG nova.compute.provider_tree [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1998.068818] env[69784]: DEBUG nova.scheduler.client.report [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1998.082812] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.320s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1998.083833] env[69784]: ERROR nova.compute.manager [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1998.083833] env[69784]: Faults: ['InvalidArgument'] [ 1998.083833] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Traceback (most recent call last): [ 1998.083833] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance 
[ 1998.083833] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] self.driver.spawn(context, instance, image_meta, [ 1998.083833] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1998.083833] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1998.083833] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1998.083833] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] self._fetch_image_if_missing(context, vi) [ 1998.083833] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1998.083833] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] image_cache(vi, tmp_image_ds_loc) [ 1998.083833] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1998.084264] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] vm_util.copy_virtual_disk( [ 1998.084264] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1998.084264] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] session._wait_for_task(vmdk_copy_task) [ 1998.084264] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1998.084264] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] return self.wait_for_task(task_ref) [ 1998.084264] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1998.084264] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] return evt.wait() [ 1998.084264] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1998.084264] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] result = hub.switch() [ 1998.084264] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1998.084264] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] return self.greenlet.switch() [ 1998.084264] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1998.084264] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] self.f(*self.args, **self.kw) [ 1998.084614] env[69784]: ERROR nova.compute.manager [instance: 
db704361-31ad-49a0-8aa7-01d4e3f42a3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1998.084614] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] raise exceptions.translate_fault(task_info.error) [ 1998.084614] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1998.084614] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Faults: ['InvalidArgument'] [ 1998.084614] env[69784]: ERROR nova.compute.manager [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] [ 1998.084614] env[69784]: DEBUG nova.compute.utils [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1998.085541] env[69784]: DEBUG nova.compute.manager [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Build of instance db704361-31ad-49a0-8aa7-01d4e3f42a3d was re-scheduled: A specified parameter was not correct: fileType [ 1998.085541] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1998.085910] env[69784]: DEBUG nova.compute.manager [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1998.086850] env[69784]: DEBUG nova.compute.manager [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1998.086850] env[69784]: DEBUG nova.compute.manager [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1998.086850] env[69784]: DEBUG nova.network.neutron [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1998.448631] env[69784]: DEBUG nova.network.neutron [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1998.464599] env[69784]: INFO nova.compute.manager [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Took 0.38 seconds to deallocate network for instance. [ 1998.553626] env[69784]: INFO nova.scheduler.client.report [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Deleted allocations for instance db704361-31ad-49a0-8aa7-01d4e3f42a3d [ 1998.576074] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2337f69e-5eeb-4d84-a046-64b090621230 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Lock "db704361-31ad-49a0-8aa7-01d4e3f42a3d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 528.053s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1998.576785] env[69784]: DEBUG oslo_concurrency.lockutils [None req-86682d25-7dcb-4281-bf98-4fcc3fd00c62 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Lock "db704361-31ad-49a0-8aa7-01d4e3f42a3d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 332.076s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1998.577029] env[69784]: DEBUG oslo_concurrency.lockutils [None req-86682d25-7dcb-4281-bf98-4fcc3fd00c62 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Acquiring lock "db704361-31ad-49a0-8aa7-01d4e3f42a3d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1998.577264] env[69784]: DEBUG oslo_concurrency.lockutils [None req-86682d25-7dcb-4281-bf98-4fcc3fd00c62 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Lock "db704361-31ad-49a0-8aa7-01d4e3f42a3d-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1998.577479] env[69784]: DEBUG oslo_concurrency.lockutils [None req-86682d25-7dcb-4281-bf98-4fcc3fd00c62 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Lock "db704361-31ad-49a0-8aa7-01d4e3f42a3d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1998.580016] env[69784]: INFO nova.compute.manager [None req-86682d25-7dcb-4281-bf98-4fcc3fd00c62 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Terminating instance [ 1998.581529] env[69784]: DEBUG nova.compute.manager [None req-86682d25-7dcb-4281-bf98-4fcc3fd00c62 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1998.581804] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-86682d25-7dcb-4281-bf98-4fcc3fd00c62 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1998.582480] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3b608895-0150-47a0-b31f-32a77378ea88 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.588720] env[69784]: DEBUG nova.compute.manager [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1998.596046] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c14d80-c58f-48e3-86fb-4aaf2376da80 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.625828] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-86682d25-7dcb-4281-bf98-4fcc3fd00c62 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance db704361-31ad-49a0-8aa7-01d4e3f42a3d could not be found. 
[ 1998.626049] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-86682d25-7dcb-4281-bf98-4fcc3fd00c62 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1998.626239] env[69784]: INFO nova.compute.manager [None req-86682d25-7dcb-4281-bf98-4fcc3fd00c62 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1998.626489] env[69784]: DEBUG oslo.service.loopingcall [None req-86682d25-7dcb-4281-bf98-4fcc3fd00c62 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1998.627123] env[69784]: DEBUG nova.compute.manager [-] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1998.627123] env[69784]: DEBUG nova.network.neutron [-] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1998.648021] env[69784]: DEBUG oslo_concurrency.lockutils [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1998.648299] env[69784]: DEBUG oslo_concurrency.lockutils [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1998.649930] env[69784]: INFO nova.compute.claims [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1998.653261] env[69784]: DEBUG nova.network.neutron [-] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1998.663071] env[69784]: INFO nova.compute.manager [-] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] Took 0.04 seconds to deallocate network for instance. 
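
The repeated 'Acquiring lock "compute_resources" ... acquired ... released' lines in this section are emitted by oslo.concurrency around the resource tracker methods the log itself names (instance_claim, abort_instance_claim). A small sketch of the same guard, assuming oslo.concurrency is installed; the lock name matches the log, while the function and its body are illustrative only.

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid, vcpus, memory_mb):
        # Bookkeeping against shared resource-tracker state runs while the
        # "compute_resources" lock is held; acquire/release are logged as above.
        print('claimed %d vCPU / %d MB for %s' % (vcpus, memory_mb, instance_uuid))
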
[ 1998.762414] env[69784]: DEBUG oslo_concurrency.lockutils [None req-86682d25-7dcb-4281-bf98-4fcc3fd00c62 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Lock "db704361-31ad-49a0-8aa7-01d4e3f42a3d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.186s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1998.767020] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "db704361-31ad-49a0-8aa7-01d4e3f42a3d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 120.874s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1998.767020] env[69784]: INFO nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: db704361-31ad-49a0-8aa7-01d4e3f42a3d] During sync_power_state the instance has a pending task (deleting). Skip. [ 1998.767020] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "db704361-31ad-49a0-8aa7-01d4e3f42a3d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1998.841875] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9879c9e8-978f-44d6-88ae-e6ccd48551f9 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.849911] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-659a189b-f9a4-4907-b611-414b0d2ca218 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.879951] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd24c455-8a12-4b8c-be51-13635597dc21 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.886819] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-854ad89d-d66e-454a-8994-f2e069fe132e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.899742] env[69784]: DEBUG nova.compute.provider_tree [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1998.910795] env[69784]: DEBUG nova.scheduler.client.report [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 
400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1998.927140] env[69784]: DEBUG oslo_concurrency.lockutils [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.279s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1998.927569] env[69784]: DEBUG nova.compute.manager [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Start building networks asynchronously for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1998.964671] env[69784]: DEBUG nova.compute.utils [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1998.965973] env[69784]: DEBUG nova.compute.manager [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1998.966202] env[69784]: DEBUG nova.network.neutron [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1998.974265] env[69784]: DEBUG nova.compute.manager [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1999.039786] env[69784]: DEBUG nova.compute.manager [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Start spawning the instance on the hypervisor. 
{{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1999.061362] env[69784]: DEBUG nova.policy [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fd3000955aac4413b0343029bb134280', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b532e382791a418081b96b564cdc6100', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 1999.068991] env[69784]: DEBUG nova.virt.hardware [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1999.069290] env[69784]: DEBUG nova.virt.hardware [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1999.069388] env[69784]: DEBUG nova.virt.hardware [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1999.069616] env[69784]: DEBUG nova.virt.hardware [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1999.069787] env[69784]: DEBUG nova.virt.hardware [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1999.069936] env[69784]: DEBUG nova.virt.hardware [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1999.070183] 
env[69784]: DEBUG nova.virt.hardware [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1999.070318] env[69784]: DEBUG nova.virt.hardware [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1999.070486] env[69784]: DEBUG nova.virt.hardware [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1999.070647] env[69784]: DEBUG nova.virt.hardware [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1999.070810] env[69784]: DEBUG nova.virt.hardware [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1999.071707] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea38144e-2854-4121-918d-dcd241413c44 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.079905] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2537d2ff-fa2e-411b-bf72-2f7932c32bf2 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.370808] env[69784]: DEBUG nova.network.neutron [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Successfully created port: da15600a-6d36-4809-a9f7-91b7472e9047 {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1999.997249] env[69784]: DEBUG nova.network.neutron [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Successfully updated port: da15600a-6d36-4809-a9f7-91b7472e9047 {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2000.015510] env[69784]: DEBUG oslo_concurrency.lockutils [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Acquiring lock "refresh_cache-0a177681-5f4e-4dc5-baee-1303be38444a" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2000.015661] env[69784]: DEBUG oslo_concurrency.lockutils 
[None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Acquired lock "refresh_cache-0a177681-5f4e-4dc5-baee-1303be38444a" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2000.015832] env[69784]: DEBUG nova.network.neutron [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2000.084806] env[69784]: DEBUG nova.network.neutron [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Instance cache missing network info. {{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2000.268027] env[69784]: DEBUG nova.network.neutron [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Updating instance_info_cache with network_info: [{"id": "da15600a-6d36-4809-a9f7-91b7472e9047", "address": "fa:16:3e:92:17:9d", "network": {"id": "859dfdd9-b780-4d6b-ac6f-16003fab4582", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-294350676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b532e382791a418081b96b564cdc6100", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6076d24d-3c8e-4bbb-ba96-a08fb27a73cc", "external-id": "nsx-vlan-transportzone-267", "segmentation_id": 267, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda15600a-6d", "ovs_interfaceid": "da15600a-6d36-4809-a9f7-91b7472e9047", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2000.280534] env[69784]: DEBUG oslo_concurrency.lockutils [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Releasing lock "refresh_cache-0a177681-5f4e-4dc5-baee-1303be38444a" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2000.280839] env[69784]: DEBUG nova.compute.manager [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Instance network_info: |[{"id": "da15600a-6d36-4809-a9f7-91b7472e9047", "address": "fa:16:3e:92:17:9d", "network": {"id": "859dfdd9-b780-4d6b-ac6f-16003fab4582", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-294350676-network", "subnets": 
[{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b532e382791a418081b96b564cdc6100", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6076d24d-3c8e-4bbb-ba96-a08fb27a73cc", "external-id": "nsx-vlan-transportzone-267", "segmentation_id": 267, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda15600a-6d", "ovs_interfaceid": "da15600a-6d36-4809-a9f7-91b7472e9047", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 2000.281301] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:92:17:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6076d24d-3c8e-4bbb-ba96-a08fb27a73cc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'da15600a-6d36-4809-a9f7-91b7472e9047', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2000.289427] env[69784]: DEBUG oslo.service.loopingcall [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2000.289542] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2000.289750] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-98c67711-1da9-4b38-abd3-57ba365efa30 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.311041] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2000.311041] env[69784]: value = "task-3467208" [ 2000.311041] env[69784]: _type = "Task" [ 2000.311041] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2000.318493] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467208, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2000.518209] env[69784]: DEBUG nova.compute.manager [req-6cca8ceb-3b85-4a95-b8a1-388d26c7df1c req-1d9a80e7-8781-4db6-b14b-6523ee3e37cb service nova] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Received event network-vif-plugged-da15600a-6d36-4809-a9f7-91b7472e9047 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2000.518463] env[69784]: DEBUG oslo_concurrency.lockutils [req-6cca8ceb-3b85-4a95-b8a1-388d26c7df1c req-1d9a80e7-8781-4db6-b14b-6523ee3e37cb service nova] Acquiring lock "0a177681-5f4e-4dc5-baee-1303be38444a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2000.518673] env[69784]: DEBUG oslo_concurrency.lockutils [req-6cca8ceb-3b85-4a95-b8a1-388d26c7df1c req-1d9a80e7-8781-4db6-b14b-6523ee3e37cb service nova] Lock "0a177681-5f4e-4dc5-baee-1303be38444a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2000.518959] env[69784]: DEBUG oslo_concurrency.lockutils [req-6cca8ceb-3b85-4a95-b8a1-388d26c7df1c req-1d9a80e7-8781-4db6-b14b-6523ee3e37cb service nova] Lock "0a177681-5f4e-4dc5-baee-1303be38444a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2000.519221] env[69784]: DEBUG nova.compute.manager [req-6cca8ceb-3b85-4a95-b8a1-388d26c7df1c req-1d9a80e7-8781-4db6-b14b-6523ee3e37cb service nova] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] No waiting events found dispatching network-vif-plugged-da15600a-6d36-4809-a9f7-91b7472e9047 {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2000.519446] env[69784]: WARNING nova.compute.manager [req-6cca8ceb-3b85-4a95-b8a1-388d26c7df1c req-1d9a80e7-8781-4db6-b14b-6523ee3e37cb service nova] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Received unexpected event network-vif-plugged-da15600a-6d36-4809-a9f7-91b7472e9047 for instance with vm_state building and task_state spawning. [ 2000.519712] env[69784]: DEBUG nova.compute.manager [req-6cca8ceb-3b85-4a95-b8a1-388d26c7df1c req-1d9a80e7-8781-4db6-b14b-6523ee3e37cb service nova] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Received event network-changed-da15600a-6d36-4809-a9f7-91b7472e9047 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2000.519970] env[69784]: DEBUG nova.compute.manager [req-6cca8ceb-3b85-4a95-b8a1-388d26c7df1c req-1d9a80e7-8781-4db6-b14b-6523ee3e37cb service nova] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Refreshing instance network info cache due to event network-changed-da15600a-6d36-4809-a9f7-91b7472e9047. 
{{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2000.520630] env[69784]: DEBUG oslo_concurrency.lockutils [req-6cca8ceb-3b85-4a95-b8a1-388d26c7df1c req-1d9a80e7-8781-4db6-b14b-6523ee3e37cb service nova] Acquiring lock "refresh_cache-0a177681-5f4e-4dc5-baee-1303be38444a" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2000.520913] env[69784]: DEBUG oslo_concurrency.lockutils [req-6cca8ceb-3b85-4a95-b8a1-388d26c7df1c req-1d9a80e7-8781-4db6-b14b-6523ee3e37cb service nova] Acquired lock "refresh_cache-0a177681-5f4e-4dc5-baee-1303be38444a" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2000.521188] env[69784]: DEBUG nova.network.neutron [req-6cca8ceb-3b85-4a95-b8a1-388d26c7df1c req-1d9a80e7-8781-4db6-b14b-6523ee3e37cb service nova] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Refreshing network info cache for port da15600a-6d36-4809-a9f7-91b7472e9047 {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2000.782322] env[69784]: DEBUG nova.network.neutron [req-6cca8ceb-3b85-4a95-b8a1-388d26c7df1c req-1d9a80e7-8781-4db6-b14b-6523ee3e37cb service nova] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Updated VIF entry in instance network info cache for port da15600a-6d36-4809-a9f7-91b7472e9047. {{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2000.782702] env[69784]: DEBUG nova.network.neutron [req-6cca8ceb-3b85-4a95-b8a1-388d26c7df1c req-1d9a80e7-8781-4db6-b14b-6523ee3e37cb service nova] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Updating instance_info_cache with network_info: [{"id": "da15600a-6d36-4809-a9f7-91b7472e9047", "address": "fa:16:3e:92:17:9d", "network": {"id": "859dfdd9-b780-4d6b-ac6f-16003fab4582", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-294350676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b532e382791a418081b96b564cdc6100", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6076d24d-3c8e-4bbb-ba96-a08fb27a73cc", "external-id": "nsx-vlan-transportzone-267", "segmentation_id": 267, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda15600a-6d", "ovs_interfaceid": "da15600a-6d36-4809-a9f7-91b7472e9047", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2000.793542] env[69784]: DEBUG oslo_concurrency.lockutils [req-6cca8ceb-3b85-4a95-b8a1-388d26c7df1c req-1d9a80e7-8781-4db6-b14b-6523ee3e37cb service nova] Releasing lock "refresh_cache-0a177681-5f4e-4dc5-baee-1303be38444a" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2000.820967] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467208, 'name': CreateVM_Task, 'duration_secs': 0.268902} completed successfully. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2000.821159] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2000.821795] env[69784]: DEBUG oslo_concurrency.lockutils [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2000.821992] env[69784]: DEBUG oslo_concurrency.lockutils [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2000.822326] env[69784]: DEBUG oslo_concurrency.lockutils [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2000.822578] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad2b8878-3637-4849-bfb2-e21647f42c9e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.827169] env[69784]: DEBUG oslo_vmware.api [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Waiting for the task: (returnval){ [ 2000.827169] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]524113f9-f1a5-d367-93e2-2be3c56152e1" [ 2000.827169] env[69784]: _type = "Task" [ 2000.827169] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2000.835927] env[69784]: DEBUG oslo_vmware.api [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]524113f9-f1a5-d367-93e2-2be3c56152e1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2001.337484] env[69784]: DEBUG oslo_concurrency.lockutils [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2001.337753] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2001.337962] env[69784]: DEBUG oslo_concurrency.lockutils [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2005.839625] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2005.839921] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2005.840037] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69784) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2007.840365] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2008.839204] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2010.840101] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2010.840101] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2010.856623] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2010.856848] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2010.857022] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2010.857183] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69784) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2010.858281] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50e510ba-3663-4c7d-90dc-2ef573508063 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.868453] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22e9c4a4-21a6-409a-80f4-d9e3068f464d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.882402] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23895cae-ad6f-42aa-b76d-8ac0b5f2cebe {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.888546] env[69784]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbea3c24-de57-4367-8884-98c234e9a502 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.917189] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180959MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=69784) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2010.918708] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2010.918708] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2011.035107] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance a34a0620-ea85-4bd5-9690-c93d70ecb9ec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2011.035273] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2011.035402] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2011.035524] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 7632e563-1790-442f-9e13-77f3d93e4223 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2011.035643] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 871e3b73-d4d8-4081-8b92-0dee212d8961 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2011.035759] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ae4e1119-10e5-42fe-bb57-6bcb2c54d90b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2011.035873] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 4479a824-1f93-45d0-953f-57736580d86f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2011.035987] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ec925fe0-8f7b-46c0-8d61-6a9cf989e798 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2011.036115] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c78438d5-ddaa-4858-a161-af83e6c16e54 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2011.036228] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 0a177681-5f4e-4dc5-baee-1303be38444a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2011.036429] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2011.036560] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2011.154157] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d9e4cc0-ebca-453c-a7c7-88a83ffda73e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.161756] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90885da7-7e9c-40dd-b304-dc925d4e6d00 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.192206] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b4f2072-0662-4531-8dc7-83d20e2bd5cf {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.199143] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fa05e8a-300a-45f3-a714-e10feaa2bd00 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.211815] env[69784]: DEBUG nova.compute.provider_tree [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2011.219479] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2011.243468] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69784) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2011.243650] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.326s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2014.953094] env[69784]: DEBUG oslo_concurrency.lockutils [None 
req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Acquiring lock "e35ffffb-c0fd-4236-b489-80eb0fdb4e37" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2014.953569] env[69784]: DEBUG oslo_concurrency.lockutils [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Lock "e35ffffb-c0fd-4236-b489-80eb0fdb4e37" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2015.238974] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2015.239269] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2015.239427] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Starting heal instance info cache {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2015.239548] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Rebuilding the list of instances to heal {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2015.263393] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2015.263612] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2015.263817] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2015.264017] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2015.264222] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Skipping network cache update for instance because it is Building. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2015.264416] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2015.264609] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2015.264800] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2015.264987] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2015.265188] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2015.265377] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Didn't find any instances for network info cache update. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2017.839855] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2043.724463] env[69784]: WARNING oslo_vmware.rw_handles [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2043.724463] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2043.724463] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2043.724463] env[69784]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2043.724463] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2043.724463] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 2043.724463] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2043.724463] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2043.724463] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2043.724463] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2043.724463] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2043.724463] env[69784]: ERROR oslo_vmware.rw_handles [ 2043.725080] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/a7d0e1e8-7f5b-431b-ba41-2bdc2350cd6b/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2043.726953] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2043.727191] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Copying Virtual Disk [datastore1] vmware_temp/a7d0e1e8-7f5b-431b-ba41-2bdc2350cd6b/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] vmware_temp/a7d0e1e8-7f5b-431b-ba41-2bdc2350cd6b/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2043.727476] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b306f0e4-d2ca-4020-89e8-bc7f3ea06b37 {{(pid=69784) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.736137] env[69784]: DEBUG oslo_vmware.api [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Waiting for the task: (returnval){ [ 2043.736137] env[69784]: value = "task-3467209" [ 2043.736137] env[69784]: _type = "Task" [ 2043.736137] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2043.743661] env[69784]: DEBUG oslo_vmware.api [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Task: {'id': task-3467209, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2044.246523] env[69784]: DEBUG oslo_vmware.exceptions [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Fault InvalidArgument not matched. {{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2044.246800] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2044.247355] env[69784]: ERROR nova.compute.manager [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2044.247355] env[69784]: Faults: ['InvalidArgument'] [ 2044.247355] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Traceback (most recent call last): [ 2044.247355] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2044.247355] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] yield resources [ 2044.247355] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2044.247355] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] self.driver.spawn(context, instance, image_meta, [ 2044.247355] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2044.247355] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2044.247355] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2044.247355] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] self._fetch_image_if_missing(context, vi) [ 2044.247355] env[69784]: 
ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2044.247667] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] image_cache(vi, tmp_image_ds_loc) [ 2044.247667] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2044.247667] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] vm_util.copy_virtual_disk( [ 2044.247667] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2044.247667] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] session._wait_for_task(vmdk_copy_task) [ 2044.247667] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2044.247667] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] return self.wait_for_task(task_ref) [ 2044.247667] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2044.247667] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] return evt.wait() [ 2044.247667] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2044.247667] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] result = hub.switch() [ 2044.247667] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2044.247667] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] return self.greenlet.switch() [ 2044.247962] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2044.247962] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] self.f(*self.args, **self.kw) [ 2044.247962] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2044.247962] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] raise exceptions.translate_fault(task_info.error) [ 2044.247962] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2044.247962] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Faults: ['InvalidArgument'] [ 2044.247962] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] [ 2044.247962] env[69784]: INFO nova.compute.manager [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 
tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Terminating instance [ 2044.249170] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2044.249378] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2044.249654] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-98e56d43-629b-40e5-a6c4-5ccba741528f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.251937] env[69784]: DEBUG nova.compute.manager [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2044.252151] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2044.252853] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96dafec5-0445-4b6b-b095-a6966840dd58 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.259257] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2044.259480] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-70d6b98b-2b64-4f7a-a0d2-cded4ca1333a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.261516] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2044.261711] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2044.262649] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-033a823d-b0b6-4a7f-aee8-286335d8f209 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.268499] env[69784]: DEBUG oslo_vmware.api [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Waiting for the task: (returnval){ [ 2044.268499] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]529c18c3-8bf5-dcf9-a0de-ee669160eb74" [ 2044.268499] env[69784]: _type = "Task" [ 2044.268499] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2044.278416] env[69784]: DEBUG oslo_vmware.api [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]529c18c3-8bf5-dcf9-a0de-ee669160eb74, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2044.328251] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2044.328457] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2044.328634] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Deleting the datastore file [datastore1] a34a0620-ea85-4bd5-9690-c93d70ecb9ec {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2044.328884] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0454a22b-3a52-4128-9b5b-b18eca6ac208 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.335132] env[69784]: DEBUG oslo_vmware.api [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Waiting for the task: (returnval){ [ 2044.335132] env[69784]: value = "task-3467211" [ 2044.335132] env[69784]: _type = "Task" [ 2044.335132] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2044.342179] env[69784]: DEBUG oslo_vmware.api [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Task: {'id': task-3467211, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2044.779402] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2044.779737] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Creating directory with path [datastore1] vmware_temp/51d0dd6b-c738-425f-9098-9eba0dcebb4d/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2044.779878] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d5dc7864-4dd5-4ef3-bcb2-6a77852435e0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.791084] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Created directory with path [datastore1] vmware_temp/51d0dd6b-c738-425f-9098-9eba0dcebb4d/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2044.791291] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Fetch image to [datastore1] vmware_temp/51d0dd6b-c738-425f-9098-9eba0dcebb4d/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2044.791460] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/51d0dd6b-c738-425f-9098-9eba0dcebb4d/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2044.792200] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c1c01c-ceef-4b50-b0ca-01dd94e75b29 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.798776] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f9e9740-4125-4997-b5a0-411b6c049edd {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.807588] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50793a29-5b9c-4012-a1b6-28b37e942831 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.840084] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9cd306ca-b63a-4c74-876d-1f4b781c30f5 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.846750] env[69784]: DEBUG oslo_vmware.api [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Task: {'id': task-3467211, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074821} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2044.848139] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2044.848326] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2044.848498] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2044.848672] env[69784]: INFO nova.compute.manager [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Took 0.60 seconds to destroy the instance on the hypervisor. 
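The CopyVirtualDisk_Task failure traced above surfaces as an oslo_vmware.exceptions.VimFaultException raised out of wait_for_task. Below is a minimal sketch of how a caller can inspect that fault; it assumes an already-created VMwareAPISession `session` and a task handle returned by CopyVirtualDisk_Task, and it is an illustration rather than the nova code path shown in the traceback.

```python
from oslo_vmware import exceptions as vexc


def wait_for_copy(session, copy_task):
    """Wait on a CopyVirtualDisk_Task and surface the vCenter fault."""
    try:
        # oslo_vmware.api.VMwareAPISession.wait_for_task polls the task and,
        # on failure, raises the translated fault (see the traceback above).
        return session.wait_for_task(copy_task)
    except vexc.VimFaultException as err:
        # fault_list carries the vCenter fault names, e.g. ['InvalidArgument'];
        # the message text here is "A specified parameter was not correct: fileType".
        if 'InvalidArgument' in (err.fault_list or []):
            print("CopyVirtualDisk_Task rejected a parameter: %s" % err)
        raise
```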
[ 2044.850377] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-be5f3c3f-a715-4fef-a411-095d5b567ed4 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.852231] env[69784]: DEBUG nova.compute.claims [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2044.852406] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2044.852623] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2044.875763] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2044.929313] env[69784]: DEBUG oslo_vmware.rw_handles [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/51d0dd6b-c738-425f-9098-9eba0dcebb4d/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2044.990280] env[69784]: DEBUG oslo_vmware.rw_handles [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Completed reading data from the image iterator. {{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2044.990469] env[69784]: DEBUG oslo_vmware.rw_handles [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/51d0dd6b-c738-425f-9098-9eba0dcebb4d/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2045.087267] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71b8d27a-6278-4769-a110-ff7071d1fa3f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.096367] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5a580d8-278f-4689-8eb1-94e989d3fea8 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.125198] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97fe8190-94f1-4d48-8db1-404e3e5784af {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.132089] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f50441d-903f-48b4-80a9-7e1e949ebd45 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.144738] env[69784]: DEBUG nova.compute.provider_tree [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2045.154027] env[69784]: DEBUG nova.scheduler.client.report [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2045.170922] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.318s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2045.171137] env[69784]: ERROR nova.compute.manager [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2045.171137] env[69784]: Faults: ['InvalidArgument'] [ 2045.171137] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Traceback (most recent call last): [ 2045.171137] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2045.171137] env[69784]: ERROR nova.compute.manager [instance: 
a34a0620-ea85-4bd5-9690-c93d70ecb9ec] self.driver.spawn(context, instance, image_meta, [ 2045.171137] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2045.171137] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2045.171137] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2045.171137] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] self._fetch_image_if_missing(context, vi) [ 2045.171137] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2045.171137] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] image_cache(vi, tmp_image_ds_loc) [ 2045.171137] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2045.171568] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] vm_util.copy_virtual_disk( [ 2045.171568] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2045.171568] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] session._wait_for_task(vmdk_copy_task) [ 2045.171568] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2045.171568] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] return self.wait_for_task(task_ref) [ 2045.171568] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2045.171568] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] return evt.wait() [ 2045.171568] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2045.171568] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] result = hub.switch() [ 2045.171568] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2045.171568] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] return self.greenlet.switch() [ 2045.171568] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2045.171568] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] self.f(*self.args, **self.kw) [ 2045.171876] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2045.171876] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] raise exceptions.translate_fault(task_info.error) [ 2045.171876] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2045.171876] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Faults: ['InvalidArgument'] [ 2045.171876] env[69784]: ERROR nova.compute.manager [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] [ 2045.171876] env[69784]: DEBUG nova.compute.utils [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2045.173365] env[69784]: DEBUG nova.compute.manager [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Build of instance a34a0620-ea85-4bd5-9690-c93d70ecb9ec was re-scheduled: A specified parameter was not correct: fileType [ 2045.173365] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2045.173734] env[69784]: DEBUG nova.compute.manager [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2045.173928] env[69784]: DEBUG nova.compute.manager [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2045.174123] env[69784]: DEBUG nova.compute.manager [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2045.174289] env[69784]: DEBUG nova.network.neutron [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2045.525022] env[69784]: DEBUG nova.network.neutron [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2045.536659] env[69784]: INFO nova.compute.manager [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Took 0.36 seconds to deallocate network for instance. [ 2045.626215] env[69784]: INFO nova.scheduler.client.report [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Deleted allocations for instance a34a0620-ea85-4bd5-9690-c93d70ecb9ec [ 2045.648939] env[69784]: DEBUG oslo_concurrency.lockutils [None req-2dbc42a7-5dfb-471b-81f3-71b23266685a tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Lock "a34a0620-ea85-4bd5-9690-c93d70ecb9ec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 557.571s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2045.650155] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8cd5d185-2b46-47a4-8a01-bc2989ba2f8b tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Lock "a34a0620-ea85-4bd5-9690-c93d70ecb9ec" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 361.731s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2045.650631] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8cd5d185-2b46-47a4-8a01-bc2989ba2f8b tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Acquiring lock "a34a0620-ea85-4bd5-9690-c93d70ecb9ec-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2045.650631] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8cd5d185-2b46-47a4-8a01-bc2989ba2f8b tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Lock "a34a0620-ea85-4bd5-9690-c93d70ecb9ec-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69784) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2045.650716] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8cd5d185-2b46-47a4-8a01-bc2989ba2f8b tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Lock "a34a0620-ea85-4bd5-9690-c93d70ecb9ec-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2045.652970] env[69784]: INFO nova.compute.manager [None req-8cd5d185-2b46-47a4-8a01-bc2989ba2f8b tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Terminating instance [ 2045.655106] env[69784]: DEBUG nova.compute.manager [None req-8cd5d185-2b46-47a4-8a01-bc2989ba2f8b tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2045.655305] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-8cd5d185-2b46-47a4-8a01-bc2989ba2f8b tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2045.655558] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4fce7134-ecaf-4aed-9b2e-f6ffb1bb255c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.663715] env[69784]: DEBUG nova.compute.manager [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2045.669354] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5e442b2-d129-4c85-b851-1da086e9e1b3 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.699827] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-8cd5d185-2b46-47a4-8a01-bc2989ba2f8b tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a34a0620-ea85-4bd5-9690-c93d70ecb9ec could not be found. [ 2045.699968] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-8cd5d185-2b46-47a4-8a01-bc2989ba2f8b tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2045.700306] env[69784]: INFO nova.compute.manager [None req-8cd5d185-2b46-47a4-8a01-bc2989ba2f8b tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Took 0.04 seconds to destroy the instance on the hypervisor. 
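The recurring 'Acquiring lock ... by ...', 'Lock ... acquired ... waited Ns' and 'Lock ... "released" ... held Ns' DEBUG lines come from oslo.concurrency's lockutils wrappers, and the 'Acquired lock' / 'Releasing lock' pair comes from its context manager. The sketch below shows roughly the pattern that produces them; the function and the lock names are illustrative stand-ins, not nova's actual decorated methods.

```python
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def refresh_resource_usage():
    # Illustrative stand-in for a method serialized on the named semaphore.
    # While this body runs, the decorator's wrapper emits the
    # Acquiring / acquired (waited Ns) / "released" (held Ns) DEBUG lines
    # seen throughout this log.
    pass


# The plain context-manager form produces the 'Acquired lock' / 'Releasing lock'
# lines used for the per-image datastore cache lock (path shortened here).
with lockutils.lock('[datastore1] devstack-image-cache_base/<image>.vmdk'):
    pass
```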
[ 2045.700402] env[69784]: DEBUG oslo.service.loopingcall [None req-8cd5d185-2b46-47a4-8a01-bc2989ba2f8b tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2045.700640] env[69784]: DEBUG nova.compute.manager [-] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2045.700741] env[69784]: DEBUG nova.network.neutron [-] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2045.719592] env[69784]: DEBUG oslo_concurrency.lockutils [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2045.719833] env[69784]: DEBUG oslo_concurrency.lockutils [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2045.721304] env[69784]: INFO nova.compute.claims [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2045.735336] env[69784]: DEBUG nova.network.neutron [-] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2045.751708] env[69784]: INFO nova.compute.manager [-] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] Took 0.05 seconds to deallocate network for instance. [ 2045.850946] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8cd5d185-2b46-47a4-8a01-bc2989ba2f8b tempest-ServerTagsTestJSON-109561457 tempest-ServerTagsTestJSON-109561457-project-member] Lock "a34a0620-ea85-4bd5-9690-c93d70ecb9ec" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.201s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2045.852054] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "a34a0620-ea85-4bd5-9690-c93d70ecb9ec" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 167.962s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2045.852256] env[69784]: INFO nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: a34a0620-ea85-4bd5-9690-c93d70ecb9ec] During sync_power_state the instance has a pending task (deleting). Skip. 
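The 'Inventory has not changed for provider ... based on inventory data' entries report the raw inventory for the provider; Placement derives the amount it may hand out per resource class as (total - reserved) * allocation_ratio, with max_unit capping any single allocation. The following hypothetical helper (not part of nova or placement) reproduces that arithmetic for the values logged here.

```python
# Inventory values as reported for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 94},
}


def schedulable_capacity(inv):
    # Placement hands out at most (total - reserved) * allocation_ratio of a
    # resource class; max_unit additionally caps each individual allocation.
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}


print(schedulable_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
```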
[ 2045.852433] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "a34a0620-ea85-4bd5-9690-c93d70ecb9ec" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2045.896179] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3afd4859-26dc-4eb5-8e3f-4a52d3461870 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.904046] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff4be962-7b99-4001-a79f-7f95c125d985 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.933563] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b7de5e9-1c1a-4c9f-b3a1-c3a0d64e8a77 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.940542] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94e1d1ea-9369-4689-93a9-6c80282c4794 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.955566] env[69784]: DEBUG nova.compute.provider_tree [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2045.963879] env[69784]: DEBUG nova.scheduler.client.report [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2045.978108] env[69784]: DEBUG oslo_concurrency.lockutils [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.258s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2045.978553] env[69784]: DEBUG nova.compute.manager [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] Start building networks asynchronously for instance. 
{{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2046.010888] env[69784]: DEBUG nova.compute.utils [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2046.012738] env[69784]: DEBUG nova.compute.manager [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 2046.013034] env[69784]: DEBUG nova.network.neutron [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2046.021908] env[69784]: DEBUG nova.compute.manager [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2046.078528] env[69784]: DEBUG nova.policy [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ed425f078af549e9b7a61cf53066bb1e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2a9ca6c244dd4ce59b9938547e24c7db', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 2046.089419] env[69784]: DEBUG nova.compute.manager [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] Start spawning the instance on the hypervisor. 
{{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2046.114584] env[69784]: DEBUG nova.virt.hardware [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2046.114896] env[69784]: DEBUG nova.virt.hardware [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2046.114956] env[69784]: DEBUG nova.virt.hardware [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2046.115148] env[69784]: DEBUG nova.virt.hardware [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2046.115295] env[69784]: DEBUG nova.virt.hardware [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2046.115439] env[69784]: DEBUG nova.virt.hardware [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2046.115642] env[69784]: DEBUG nova.virt.hardware [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2046.115806] env[69784]: DEBUG nova.virt.hardware [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2046.116024] env[69784]: DEBUG 
nova.virt.hardware [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2046.116140] env[69784]: DEBUG nova.virt.hardware [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2046.116311] env[69784]: DEBUG nova.virt.hardware [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2046.117185] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3d68a8d-b3cb-4ff6-8346-7e98539916ea {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.125561] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5428f33-f2f5-4999-bc9f-6a211aafbfae {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.446186] env[69784]: DEBUG nova.network.neutron [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] Successfully created port: 42f5ccad-ade9-447a-b1d6-7ee3ac3e273a {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2047.067301] env[69784]: DEBUG nova.network.neutron [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] Successfully updated port: 42f5ccad-ade9-447a-b1d6-7ee3ac3e273a {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2047.079240] env[69784]: DEBUG oslo_concurrency.lockutils [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Acquiring lock "refresh_cache-e35ffffb-c0fd-4236-b489-80eb0fdb4e37" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2047.079402] env[69784]: DEBUG oslo_concurrency.lockutils [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Acquired lock "refresh_cache-e35ffffb-c0fd-4236-b489-80eb0fdb4e37" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2047.079924] env[69784]: DEBUG nova.network.neutron [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2047.117315] env[69784]: DEBUG nova.network.neutron [None 
req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] Instance cache missing network info. {{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2047.513507] env[69784]: DEBUG nova.network.neutron [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] Updating instance_info_cache with network_info: [{"id": "42f5ccad-ade9-447a-b1d6-7ee3ac3e273a", "address": "fa:16:3e:19:ab:b2", "network": {"id": "c5a63063-6b00-4337-986e-4580b1869bc6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-676037584-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a9ca6c244dd4ce59b9938547e24c7db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42f5ccad-ad", "ovs_interfaceid": "42f5ccad-ade9-447a-b1d6-7ee3ac3e273a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2047.525042] env[69784]: DEBUG oslo_concurrency.lockutils [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Releasing lock "refresh_cache-e35ffffb-c0fd-4236-b489-80eb0fdb4e37" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2047.525042] env[69784]: DEBUG nova.compute.manager [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] Instance network_info: |[{"id": "42f5ccad-ade9-447a-b1d6-7ee3ac3e273a", "address": "fa:16:3e:19:ab:b2", "network": {"id": "c5a63063-6b00-4337-986e-4580b1869bc6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-676037584-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a9ca6c244dd4ce59b9938547e24c7db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42f5ccad-ad", "ovs_interfaceid": 
"42f5ccad-ade9-447a-b1d6-7ee3ac3e273a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 2047.525292] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:19:ab:b2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e1a5c1-4ae7-409b-8de7-d401684ef60d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '42f5ccad-ade9-447a-b1d6-7ee3ac3e273a', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2047.532336] env[69784]: DEBUG oslo.service.loopingcall [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2047.532876] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2047.533126] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-35c7ff7b-cb02-4852-b9c5-8f1b82d99bed {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.554425] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2047.554425] env[69784]: value = "task-3467212" [ 2047.554425] env[69784]: _type = "Task" [ 2047.554425] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2047.563944] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467212, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2047.607760] env[69784]: DEBUG nova.compute.manager [req-369baced-fd49-47f2-8a20-fb2f2c64daa0 req-8854a428-8fed-4c28-baed-e35f3860fac0 service nova] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] Received event network-vif-plugged-42f5ccad-ade9-447a-b1d6-7ee3ac3e273a {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2047.608040] env[69784]: DEBUG oslo_concurrency.lockutils [req-369baced-fd49-47f2-8a20-fb2f2c64daa0 req-8854a428-8fed-4c28-baed-e35f3860fac0 service nova] Acquiring lock "e35ffffb-c0fd-4236-b489-80eb0fdb4e37-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2047.608308] env[69784]: DEBUG oslo_concurrency.lockutils [req-369baced-fd49-47f2-8a20-fb2f2c64daa0 req-8854a428-8fed-4c28-baed-e35f3860fac0 service nova] Lock "e35ffffb-c0fd-4236-b489-80eb0fdb4e37-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2047.608385] env[69784]: DEBUG oslo_concurrency.lockutils [req-369baced-fd49-47f2-8a20-fb2f2c64daa0 req-8854a428-8fed-4c28-baed-e35f3860fac0 service nova] Lock "e35ffffb-c0fd-4236-b489-80eb0fdb4e37-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2047.608550] env[69784]: DEBUG nova.compute.manager [req-369baced-fd49-47f2-8a20-fb2f2c64daa0 req-8854a428-8fed-4c28-baed-e35f3860fac0 service nova] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] No waiting events found dispatching network-vif-plugged-42f5ccad-ade9-447a-b1d6-7ee3ac3e273a {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2047.608715] env[69784]: WARNING nova.compute.manager [req-369baced-fd49-47f2-8a20-fb2f2c64daa0 req-8854a428-8fed-4c28-baed-e35f3860fac0 service nova] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] Received unexpected event network-vif-plugged-42f5ccad-ade9-447a-b1d6-7ee3ac3e273a for instance with vm_state building and task_state spawning. [ 2047.608950] env[69784]: DEBUG nova.compute.manager [req-369baced-fd49-47f2-8a20-fb2f2c64daa0 req-8854a428-8fed-4c28-baed-e35f3860fac0 service nova] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] Received event network-changed-42f5ccad-ade9-447a-b1d6-7ee3ac3e273a {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2047.609041] env[69784]: DEBUG nova.compute.manager [req-369baced-fd49-47f2-8a20-fb2f2c64daa0 req-8854a428-8fed-4c28-baed-e35f3860fac0 service nova] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] Refreshing instance network info cache due to event network-changed-42f5ccad-ade9-447a-b1d6-7ee3ac3e273a. 
{{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2047.609271] env[69784]: DEBUG oslo_concurrency.lockutils [req-369baced-fd49-47f2-8a20-fb2f2c64daa0 req-8854a428-8fed-4c28-baed-e35f3860fac0 service nova] Acquiring lock "refresh_cache-e35ffffb-c0fd-4236-b489-80eb0fdb4e37" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2047.609352] env[69784]: DEBUG oslo_concurrency.lockutils [req-369baced-fd49-47f2-8a20-fb2f2c64daa0 req-8854a428-8fed-4c28-baed-e35f3860fac0 service nova] Acquired lock "refresh_cache-e35ffffb-c0fd-4236-b489-80eb0fdb4e37" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2047.609634] env[69784]: DEBUG nova.network.neutron [req-369baced-fd49-47f2-8a20-fb2f2c64daa0 req-8854a428-8fed-4c28-baed-e35f3860fac0 service nova] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] Refreshing network info cache for port 42f5ccad-ade9-447a-b1d6-7ee3ac3e273a {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2047.849041] env[69784]: DEBUG nova.network.neutron [req-369baced-fd49-47f2-8a20-fb2f2c64daa0 req-8854a428-8fed-4c28-baed-e35f3860fac0 service nova] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] Updated VIF entry in instance network info cache for port 42f5ccad-ade9-447a-b1d6-7ee3ac3e273a. {{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2047.849509] env[69784]: DEBUG nova.network.neutron [req-369baced-fd49-47f2-8a20-fb2f2c64daa0 req-8854a428-8fed-4c28-baed-e35f3860fac0 service nova] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] Updating instance_info_cache with network_info: [{"id": "42f5ccad-ade9-447a-b1d6-7ee3ac3e273a", "address": "fa:16:3e:19:ab:b2", "network": {"id": "c5a63063-6b00-4337-986e-4580b1869bc6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-676037584-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a9ca6c244dd4ce59b9938547e24c7db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42f5ccad-ad", "ovs_interfaceid": "42f5ccad-ade9-447a-b1d6-7ee3ac3e273a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2047.860222] env[69784]: DEBUG oslo_concurrency.lockutils [req-369baced-fd49-47f2-8a20-fb2f2c64daa0 req-8854a428-8fed-4c28-baed-e35f3860fac0 service nova] Releasing lock "refresh_cache-e35ffffb-c0fd-4236-b489-80eb0fdb4e37" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2048.066342] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467212, 'name': CreateVM_Task, 'duration_secs': 0.314268} completed successfully. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2048.066504] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2048.067145] env[69784]: DEBUG oslo_concurrency.lockutils [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2048.067313] env[69784]: DEBUG oslo_concurrency.lockutils [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2048.067629] env[69784]: DEBUG oslo_concurrency.lockutils [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2048.067875] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ece3cdd-1f8c-4f21-87e6-950bdf7afab7 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.072099] env[69784]: DEBUG oslo_vmware.api [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Waiting for the task: (returnval){ [ 2048.072099] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52f1965b-0e64-afd3-924c-4748b838fced" [ 2048.072099] env[69784]: _type = "Task" [ 2048.072099] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2048.079418] env[69784]: DEBUG oslo_vmware.api [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52f1965b-0e64-afd3-924c-4748b838fced, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.582254] env[69784]: DEBUG oslo_concurrency.lockutils [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2048.582518] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2048.582819] env[69784]: DEBUG oslo_concurrency.lockutils [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2060.518951] env[69784]: DEBUG oslo_concurrency.lockutils [None req-128fdb63-9326-4efa-84d1-43e3fd06a8f5 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Acquiring lock "c78438d5-ddaa-4858-a161-af83e6c16e54" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2065.841195] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2067.841509] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2067.841805] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69784) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2068.840286] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2070.839331] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2071.840594] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2072.841055] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2072.852737] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2072.852989] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2072.853175] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2072.853331] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69784) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2072.854447] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f2e2d44-e785-45bd-8ff6-f363902a94ce {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.863220] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae4e701a-25e3-44d5-b317-33f2a3aff012 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.876812] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e2e69ed-31c8-4752-976b-74c04a46245e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.882801] env[69784]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aafebfd-8666-4c0d-b9b9-660d3367fdf2 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.912245] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180962MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=69784) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2072.912385] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2072.912571] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2072.984868] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2072.985037] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2072.985168] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 7632e563-1790-442f-9e13-77f3d93e4223 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2072.985291] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 871e3b73-d4d8-4081-8b92-0dee212d8961 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2072.985409] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ae4e1119-10e5-42fe-bb57-6bcb2c54d90b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2072.985526] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 4479a824-1f93-45d0-953f-57736580d86f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2072.985643] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ec925fe0-8f7b-46c0-8d61-6a9cf989e798 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2072.985756] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c78438d5-ddaa-4858-a161-af83e6c16e54 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2072.985870] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 0a177681-5f4e-4dc5-baee-1303be38444a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2072.985984] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance e35ffffb-c0fd-4236-b489-80eb0fdb4e37 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2072.986174] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2072.986310] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2073.099607] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ccf18a4-b331-4847-9f73-7ab7e213e4ef {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.107262] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3c423c6-9b3b-4dcb-b392-9ab2fdfacbea {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.136623] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f91cc75-9786-47ef-a18d-af8e54b1a4a6 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.143451] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a784bc7f-2df6-4875-b6dd-40b474d49c6e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.156812] env[69784]: DEBUG nova.compute.provider_tree [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2073.165101] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2073.179937] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69784) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2073.180138] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.268s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2076.180393] env[69784]: DEBUG oslo_service.periodic_task [None 
req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2076.180393] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Starting heal instance info cache {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2076.180767] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Rebuilding the list of instances to heal {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2076.200643] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2076.200793] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2076.200928] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2076.201157] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2076.201273] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2076.201397] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2076.201518] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2076.201637] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2076.201754] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Skipping network cache update for instance because it is Building. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2076.201907] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2076.202039] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Didn't find any instances for network info cache update. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2076.856800] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2077.834566] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2077.855146] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2085.641196] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b7915d42-1e8a-47f0-b285-9dc7a3107188 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Acquiring lock "0a177681-5f4e-4dc5-baee-1303be38444a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2091.653258] env[69784]: WARNING oslo_vmware.rw_handles [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2091.653258] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2091.653258] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2091.653258] env[69784]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2091.653258] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2091.653258] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 2091.653258] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2091.653258] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2091.653258] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2091.653258] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2091.653258] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2091.653258] env[69784]: ERROR oslo_vmware.rw_handles [ 2091.654148] 
env[69784]: DEBUG nova.virt.vmwareapi.images [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/51d0dd6b-c738-425f-9098-9eba0dcebb4d/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2091.656323] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2091.656570] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Copying Virtual Disk [datastore1] vmware_temp/51d0dd6b-c738-425f-9098-9eba0dcebb4d/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] vmware_temp/51d0dd6b-c738-425f-9098-9eba0dcebb4d/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2091.656859] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-84ee2f91-f2fc-437c-a662-41f172b06956 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.666542] env[69784]: DEBUG oslo_vmware.api [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Waiting for the task: (returnval){ [ 2091.666542] env[69784]: value = "task-3467213" [ 2091.666542] env[69784]: _type = "Task" [ 2091.666542] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2091.674433] env[69784]: DEBUG oslo_vmware.api [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Task: {'id': task-3467213, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2092.177820] env[69784]: DEBUG oslo_vmware.exceptions [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Fault InvalidArgument not matched. 
{{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2092.178081] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2092.178706] env[69784]: ERROR nova.compute.manager [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2092.178706] env[69784]: Faults: ['InvalidArgument'] [ 2092.178706] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Traceback (most recent call last): [ 2092.178706] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2092.178706] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] yield resources [ 2092.178706] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2092.178706] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] self.driver.spawn(context, instance, image_meta, [ 2092.178706] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2092.178706] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2092.178706] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2092.178706] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] self._fetch_image_if_missing(context, vi) [ 2092.178706] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2092.178978] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] image_cache(vi, tmp_image_ds_loc) [ 2092.178978] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2092.178978] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] vm_util.copy_virtual_disk( [ 2092.178978] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2092.178978] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] session._wait_for_task(vmdk_copy_task) [ 2092.178978] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2092.178978] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] return self.wait_for_task(task_ref) [ 2092.178978] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2092.178978] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] return evt.wait() [ 2092.178978] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2092.178978] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] result = hub.switch() [ 2092.178978] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2092.178978] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] return self.greenlet.switch() [ 2092.179536] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2092.179536] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] self.f(*self.args, **self.kw) [ 2092.179536] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2092.179536] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] raise exceptions.translate_fault(task_info.error) [ 2092.179536] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2092.179536] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Faults: ['InvalidArgument'] [ 2092.179536] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] [ 2092.179536] env[69784]: INFO nova.compute.manager [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Terminating instance [ 2092.180635] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2092.180838] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2092.181087] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1937b2cb-7bc7-4146-a762-13aca321152b 
{{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.183526] env[69784]: DEBUG nova.compute.manager [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2092.183720] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2092.184448] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9522c9e9-d6f9-4ee3-92bb-4864acf7b4d3 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.190976] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2092.191298] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f9db705b-e6fc-4d42-8e76-d4ce76b1c429 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.193341] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2092.193514] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2092.194511] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73e19c1c-adbe-4d20-9916-179ee7290b0b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.199604] env[69784]: DEBUG oslo_vmware.api [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Waiting for the task: (returnval){ [ 2092.199604] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52b02eb2-b57d-ae80-a272-1273501ba03e" [ 2092.199604] env[69784]: _type = "Task" [ 2092.199604] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2092.206517] env[69784]: DEBUG oslo_vmware.api [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52b02eb2-b57d-ae80-a272-1273501ba03e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2092.261343] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2092.261562] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2092.261743] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Deleting the datastore file [datastore1] 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2092.262030] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b35eca56-61bc-42fc-bb09-47d0ccf0de94 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.267663] env[69784]: DEBUG oslo_vmware.api [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Waiting for the task: (returnval){ [ 2092.267663] env[69784]: value = "task-3467215" [ 2092.267663] env[69784]: _type = "Task" [ 2092.267663] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2092.274936] env[69784]: DEBUG oslo_vmware.api [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Task: {'id': task-3467215, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2092.711075] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2092.711445] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Creating directory with path [datastore1] vmware_temp/8eefb929-6aeb-4e31-a32e-0500cbe09f2b/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2092.711565] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-13c105da-8465-407e-a83a-7a0488a37e06 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.722663] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Created directory with path [datastore1] vmware_temp/8eefb929-6aeb-4e31-a32e-0500cbe09f2b/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2092.722830] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Fetch image to [datastore1] vmware_temp/8eefb929-6aeb-4e31-a32e-0500cbe09f2b/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2092.723031] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/8eefb929-6aeb-4e31-a32e-0500cbe09f2b/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2092.723714] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9ef6e9d-5824-442e-9219-1742ff1c9c82 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.729823] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07bb1740-e4ce-4e2c-acd1-27885705d814 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.738292] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-520b83a3-a2b2-4e98-8db9-3488939c6a39 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.767922] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b95f635d-344f-4702-9783-37e89d803161 {{(pid=69784) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.777933] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-fde9cef7-8c64-48d3-8edd-c912c73b42d5 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.779530] env[69784]: DEBUG oslo_vmware.api [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Task: {'id': task-3467215, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077642} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2092.779764] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2092.779982] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2092.780219] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2092.780348] env[69784]: INFO nova.compute.manager [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Took 0.60 seconds to destroy the instance on the hypervisor. 
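The interleaved "Waiting for the task: (returnval){ ... } to complete", "progress is 0%" and "completed successfully" records above come from oslo.vmware's task-polling loop (api.wait_for_task / _poll_task). The snippet below is a minimal sketch of that call pattern only; it is not Nova's ds_util.file_delete itself, the vCenter host and credentials are placeholders, and the datacenter reference is assumed to come from an earlier property lookup.

# Sketch only: invoke a vCenter *_Task method, then block while oslo.vmware
# polls it (the "Waiting for the task ... / progress is 0% / completed
# successfully" records seen in this log).
from oslo_vmware import api as vmware_api
from oslo_vmware import exceptions as vexc

def delete_datastore_file(session, ds_file_path, datacenter_ref):
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=ds_file_path,
                              datacenter=datacenter_ref)
    try:
        # wait_for_task() polls task.info until it succeeds or errors; a task
        # error surfaces as an oslo_vmware exception (e.g. VimFaultException,
        # as in the fileType fault above).
        session.wait_for_task(task)
    except vexc.FileNotFoundException:
        pass  # file already gone: treat as deleted

# Placeholder usage; a real run needs a reachable vCenter and a valid dc_ref:
# session = vmware_api.VMwareAPISession('vcenter.example.com', 'user', 'secret',
#                                       api_retry_count=10, task_poll_interval=0.5)
# delete_datastore_file(session, '[datastore1] some/file.vmdk', dc_ref)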
[ 2092.782398] env[69784]: DEBUG nova.compute.claims [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2092.782560] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2092.782770] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2092.799927] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2092.915124] env[69784]: DEBUG oslo_vmware.rw_handles [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8eefb929-6aeb-4e31-a32e-0500cbe09f2b/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2092.976269] env[69784]: DEBUG oslo_vmware.rw_handles [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Completed reading data from the image iterator. {{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2092.976484] env[69784]: DEBUG oslo_vmware.rw_handles [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8eefb929-6aeb-4e31-a32e-0500cbe09f2b/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2092.998082] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-122ec1b9-ea26-44a2-a69f-84d3da0ba84e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.006631] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f380f248-be63-4f68-933f-f3d86d3e627b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.037311] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d128d6b-2551-4dbc-9db2-f4e748fda420 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.044721] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99a2db1a-136a-4df6-a492-e532225705b1 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.058386] env[69784]: DEBUG nova.compute.provider_tree [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2093.067572] env[69784]: DEBUG nova.scheduler.client.report [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2093.086012] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.302s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2093.086012] env[69784]: ERROR nova.compute.manager [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2093.086012] env[69784]: Faults: ['InvalidArgument'] [ 2093.086012] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Traceback (most recent call last): [ 2093.086012] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2093.086012] 
env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] self.driver.spawn(context, instance, image_meta, [ 2093.086012] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2093.086012] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2093.086012] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2093.086012] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] self._fetch_image_if_missing(context, vi) [ 2093.086319] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2093.086319] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] image_cache(vi, tmp_image_ds_loc) [ 2093.086319] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2093.086319] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] vm_util.copy_virtual_disk( [ 2093.086319] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2093.086319] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] session._wait_for_task(vmdk_copy_task) [ 2093.086319] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2093.086319] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] return self.wait_for_task(task_ref) [ 2093.086319] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2093.086319] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] return evt.wait() [ 2093.086319] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2093.086319] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] result = hub.switch() [ 2093.086319] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2093.086601] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] return self.greenlet.switch() [ 2093.086601] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2093.086601] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] self.f(*self.args, **self.kw) [ 2093.086601] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2093.086601] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] raise exceptions.translate_fault(task_info.error) [ 2093.086601] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2093.086601] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Faults: ['InvalidArgument'] [ 2093.086601] env[69784]: ERROR nova.compute.manager [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] [ 2093.086601] env[69784]: DEBUG nova.compute.utils [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2093.087892] env[69784]: DEBUG nova.compute.manager [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Build of instance 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d was re-scheduled: A specified parameter was not correct: fileType [ 2093.087892] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2093.088293] env[69784]: DEBUG nova.compute.manager [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2093.088470] env[69784]: DEBUG nova.compute.manager [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2093.088718] env[69784]: DEBUG nova.compute.manager [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2093.088907] env[69784]: DEBUG nova.network.neutron [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2093.483146] env[69784]: DEBUG nova.network.neutron [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2093.499528] env[69784]: INFO nova.compute.manager [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Took 0.41 seconds to deallocate network for instance. [ 2093.585649] env[69784]: INFO nova.scheduler.client.report [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Deleted allocations for instance 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d [ 2093.602531] env[69784]: DEBUG oslo_concurrency.lockutils [None req-e05b302e-5660-4c61-a3bd-34c376fdba1d tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Lock "0d0e7d04-847c-486f-8cb1-b2b3afe33f2d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 597.367s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2093.602793] env[69784]: DEBUG oslo_concurrency.lockutils [None req-598f0593-5cfa-45f9-8a56-f20950968e26 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Lock "0d0e7d04-847c-486f-8cb1-b2b3afe33f2d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 401.442s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2093.603026] env[69784]: DEBUG oslo_concurrency.lockutils [None req-598f0593-5cfa-45f9-8a56-f20950968e26 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Acquiring lock "0d0e7d04-847c-486f-8cb1-b2b3afe33f2d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2093.603246] env[69784]: DEBUG oslo_concurrency.lockutils [None req-598f0593-5cfa-45f9-8a56-f20950968e26 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Lock "0d0e7d04-847c-486f-8cb1-b2b3afe33f2d-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2093.603414] env[69784]: DEBUG oslo_concurrency.lockutils [None req-598f0593-5cfa-45f9-8a56-f20950968e26 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Lock "0d0e7d04-847c-486f-8cb1-b2b3afe33f2d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2093.605595] env[69784]: INFO nova.compute.manager [None req-598f0593-5cfa-45f9-8a56-f20950968e26 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Terminating instance [ 2093.607233] env[69784]: DEBUG nova.compute.manager [None req-598f0593-5cfa-45f9-8a56-f20950968e26 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2093.607422] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-598f0593-5cfa-45f9-8a56-f20950968e26 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2093.607891] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2d50d1bc-33f2-400e-a517-af624ae0e88b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.616678] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f6fd81e-36bc-4712-b638-75cf3cfbd603 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.644796] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-598f0593-5cfa-45f9-8a56-f20950968e26 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d could not be found. [ 2093.644967] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-598f0593-5cfa-45f9-8a56-f20950968e26 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2093.645162] env[69784]: INFO nova.compute.manager [None req-598f0593-5cfa-45f9-8a56-f20950968e26 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 2093.645392] env[69784]: DEBUG oslo.service.loopingcall [None req-598f0593-5cfa-45f9-8a56-f20950968e26 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2093.645608] env[69784]: DEBUG nova.compute.manager [-] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2093.645706] env[69784]: DEBUG nova.network.neutron [-] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2093.689082] env[69784]: DEBUG nova.network.neutron [-] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2093.697746] env[69784]: INFO nova.compute.manager [-] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] Took 0.05 seconds to deallocate network for instance. [ 2093.790541] env[69784]: DEBUG oslo_concurrency.lockutils [None req-598f0593-5cfa-45f9-8a56-f20950968e26 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Lock "0d0e7d04-847c-486f-8cb1-b2b3afe33f2d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.188s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2093.791414] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "0d0e7d04-847c-486f-8cb1-b2b3afe33f2d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 215.901s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2093.791614] env[69784]: INFO nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 0d0e7d04-847c-486f-8cb1-b2b3afe33f2d] During sync_power_state the instance has a pending task (deleting). Skip. [ 2093.791790] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "0d0e7d04-847c-486f-8cb1-b2b3afe33f2d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2127.840649] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2127.841064] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2127.841064] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69784) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2128.370319] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Acquiring lock "86a94763-92c8-4689-b37a-3dc6c1ec744c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2128.370602] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Lock "86a94763-92c8-4689-b37a-3dc6c1ec744c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2128.381936] env[69784]: DEBUG nova.compute.manager [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 86a94763-92c8-4689-b37a-3dc6c1ec744c] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2128.428116] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2128.428348] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2128.429682] env[69784]: INFO nova.compute.claims [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 86a94763-92c8-4689-b37a-3dc6c1ec744c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2128.615268] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50e01fb5-d975-448b-92fb-a53ec0d4588f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.622640] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d1afadd-16e3-4b97-8d3d-93f9ed6c05a7 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.653341] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59f772c1-c5eb-4111-b41a-db20bcb100ce {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.661075] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2be5e89-c19d-444d-bb58-0548e979b41c {{(pid=69784) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.674768] env[69784]: DEBUG nova.compute.provider_tree [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2128.684998] env[69784]: DEBUG nova.scheduler.client.report [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2128.698140] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.270s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2128.698646] env[69784]: DEBUG nova.compute.manager [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 86a94763-92c8-4689-b37a-3dc6c1ec744c] Start building networks asynchronously for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2128.733991] env[69784]: DEBUG nova.compute.utils [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2128.735481] env[69784]: DEBUG nova.compute.manager [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 86a94763-92c8-4689-b37a-3dc6c1ec744c] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 2128.735652] env[69784]: DEBUG nova.network.neutron [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 86a94763-92c8-4689-b37a-3dc6c1ec744c] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2128.743926] env[69784]: DEBUG nova.compute.manager [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 86a94763-92c8-4689-b37a-3dc6c1ec744c] Start building block device mappings for instance. 
{{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2128.800184] env[69784]: DEBUG nova.policy [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '863e5eb3480c4cf7ae5b3109ac113718', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8e91214f37d741d7935308a363541f5c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 2128.808957] env[69784]: DEBUG nova.compute.manager [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 86a94763-92c8-4689-b37a-3dc6c1ec744c] Start spawning the instance on the hypervisor. {{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2128.832949] env[69784]: DEBUG nova.virt.hardware [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2128.833122] env[69784]: DEBUG nova.virt.hardware [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2128.833230] env[69784]: DEBUG nova.virt.hardware [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2128.833422] env[69784]: DEBUG nova.virt.hardware [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2128.833597] env[69784]: DEBUG nova.virt.hardware [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2128.833746] env[69784]: DEBUG nova.virt.hardware [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 
tempest-ServersTestJSON-1328085021-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2128.833955] env[69784]: DEBUG nova.virt.hardware [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2128.834131] env[69784]: DEBUG nova.virt.hardware [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2128.834311] env[69784]: DEBUG nova.virt.hardware [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2128.834475] env[69784]: DEBUG nova.virt.hardware [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2128.834652] env[69784]: DEBUG nova.virt.hardware [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2128.835594] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcf11b47-14c7-4746-92ab-412e27186a9a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.843345] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caf87b11-6ae1-4a38-8a09-15b8f0ac0891 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.228049] env[69784]: DEBUG nova.network.neutron [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 86a94763-92c8-4689-b37a-3dc6c1ec744c] Successfully created port: 54fa18a9-8bed-468a-b09a-b627b4808daa {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2129.750630] env[69784]: DEBUG nova.compute.manager [req-eac016c9-56bc-4085-82c3-5ed0795d0c5c req-27cd385b-b3cd-4d78-9baf-261744b85d07 service nova] [instance: 86a94763-92c8-4689-b37a-3dc6c1ec744c] Received event network-vif-plugged-54fa18a9-8bed-468a-b09a-b627b4808daa {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2129.750858] env[69784]: DEBUG oslo_concurrency.lockutils [req-eac016c9-56bc-4085-82c3-5ed0795d0c5c req-27cd385b-b3cd-4d78-9baf-261744b85d07 service nova] Acquiring lock "86a94763-92c8-4689-b37a-3dc6c1ec744c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2129.751100] env[69784]: DEBUG oslo_concurrency.lockutils [req-eac016c9-56bc-4085-82c3-5ed0795d0c5c req-27cd385b-b3cd-4d78-9baf-261744b85d07 service nova] Lock "86a94763-92c8-4689-b37a-3dc6c1ec744c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2129.751276] env[69784]: DEBUG oslo_concurrency.lockutils [req-eac016c9-56bc-4085-82c3-5ed0795d0c5c req-27cd385b-b3cd-4d78-9baf-261744b85d07 service nova] Lock "86a94763-92c8-4689-b37a-3dc6c1ec744c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2129.751447] env[69784]: DEBUG nova.compute.manager [req-eac016c9-56bc-4085-82c3-5ed0795d0c5c req-27cd385b-b3cd-4d78-9baf-261744b85d07 service nova] [instance: 86a94763-92c8-4689-b37a-3dc6c1ec744c] No waiting events found dispatching network-vif-plugged-54fa18a9-8bed-468a-b09a-b627b4808daa {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2129.751613] env[69784]: WARNING nova.compute.manager [req-eac016c9-56bc-4085-82c3-5ed0795d0c5c req-27cd385b-b3cd-4d78-9baf-261744b85d07 service nova] [instance: 86a94763-92c8-4689-b37a-3dc6c1ec744c] Received unexpected event network-vif-plugged-54fa18a9-8bed-468a-b09a-b627b4808daa for instance with vm_state building and task_state spawning. [ 2129.837576] env[69784]: DEBUG nova.network.neutron [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 86a94763-92c8-4689-b37a-3dc6c1ec744c] Successfully updated port: 54fa18a9-8bed-468a-b09a-b627b4808daa {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2129.840523] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2129.848685] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Acquiring lock "refresh_cache-86a94763-92c8-4689-b37a-3dc6c1ec744c" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2129.849090] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Acquired lock "refresh_cache-86a94763-92c8-4689-b37a-3dc6c1ec744c" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2129.849237] env[69784]: DEBUG nova.network.neutron [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 86a94763-92c8-4689-b37a-3dc6c1ec744c] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2129.883203] env[69784]: DEBUG nova.network.neutron [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 
tempest-ServersTestJSON-1328085021-project-member] [instance: 86a94763-92c8-4689-b37a-3dc6c1ec744c] Instance cache missing network info. {{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2130.064485] env[69784]: DEBUG nova.network.neutron [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 86a94763-92c8-4689-b37a-3dc6c1ec744c] Updating instance_info_cache with network_info: [{"id": "54fa18a9-8bed-468a-b09a-b627b4808daa", "address": "fa:16:3e:5b:41:71", "network": {"id": "81e149bf-ad1b-4970-8b37-685f69ec18f7", "bridge": "br-int", "label": "tempest-ServersTestJSON-918442431-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e91214f37d741d7935308a363541f5c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a92a4ffe-7939-4697-bf98-5b22e2c7feda", "external-id": "nsx-vlan-transportzone-732", "segmentation_id": 732, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54fa18a9-8b", "ovs_interfaceid": "54fa18a9-8bed-468a-b09a-b627b4808daa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2130.075649] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Releasing lock "refresh_cache-86a94763-92c8-4689-b37a-3dc6c1ec744c" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2130.076196] env[69784]: DEBUG nova.compute.manager [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 86a94763-92c8-4689-b37a-3dc6c1ec744c] Instance network_info: |[{"id": "54fa18a9-8bed-468a-b09a-b627b4808daa", "address": "fa:16:3e:5b:41:71", "network": {"id": "81e149bf-ad1b-4970-8b37-685f69ec18f7", "bridge": "br-int", "label": "tempest-ServersTestJSON-918442431-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e91214f37d741d7935308a363541f5c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a92a4ffe-7939-4697-bf98-5b22e2c7feda", "external-id": "nsx-vlan-transportzone-732", "segmentation_id": 732, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54fa18a9-8b", "ovs_interfaceid": "54fa18a9-8bed-468a-b09a-b627b4808daa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 2130.076341] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 86a94763-92c8-4689-b37a-3dc6c1ec744c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5b:41:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a92a4ffe-7939-4697-bf98-5b22e2c7feda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '54fa18a9-8bed-468a-b09a-b627b4808daa', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2130.086017] env[69784]: DEBUG oslo.service.loopingcall [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2130.086017] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86a94763-92c8-4689-b37a-3dc6c1ec744c] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2130.086017] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-51dab513-46ef-488e-a888-14c4c85792a1 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.105445] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2130.105445] env[69784]: value = "task-3467216" [ 2130.105445] env[69784]: _type = "Task" [ 2130.105445] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2130.113695] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467216, 'name': CreateVM_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2130.619194] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467216, 'name': CreateVM_Task, 'duration_secs': 0.286088} completed successfully. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2130.619194] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86a94763-92c8-4689-b37a-3dc6c1ec744c] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2130.619610] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2130.619853] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2130.620329] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2130.620679] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38e26e31-de6c-48bb-a7e3-deb7c0ddfcb2 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.626525] env[69784]: DEBUG oslo_vmware.api [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Waiting for the task: (returnval){ [ 2130.626525] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52f7a1bd-6cff-12a1-c113-9aa7d9e52f59" [ 2130.626525] env[69784]: _type = "Task" [ 2130.626525] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2130.638231] env[69784]: DEBUG oslo_vmware.api [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52f7a1bd-6cff-12a1-c113-9aa7d9e52f59, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2131.135969] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2131.136285] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 86a94763-92c8-4689-b37a-3dc6c1ec744c] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2131.136474] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c582e4fc-b55f-4c61-9efd-544e3a04e563 tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2131.781582] env[69784]: DEBUG nova.compute.manager [req-1f09b1d5-f1e2-4299-9aed-789b2a37cb62 req-d9e3cdd2-d68e-4b51-8544-67e706daf1c2 service nova] [instance: 86a94763-92c8-4689-b37a-3dc6c1ec744c] Received event network-changed-54fa18a9-8bed-468a-b09a-b627b4808daa {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2131.781793] env[69784]: DEBUG nova.compute.manager [req-1f09b1d5-f1e2-4299-9aed-789b2a37cb62 req-d9e3cdd2-d68e-4b51-8544-67e706daf1c2 service nova] [instance: 86a94763-92c8-4689-b37a-3dc6c1ec744c] Refreshing instance network info cache due to event network-changed-54fa18a9-8bed-468a-b09a-b627b4808daa. {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2131.782134] env[69784]: DEBUG oslo_concurrency.lockutils [req-1f09b1d5-f1e2-4299-9aed-789b2a37cb62 req-d9e3cdd2-d68e-4b51-8544-67e706daf1c2 service nova] Acquiring lock "refresh_cache-86a94763-92c8-4689-b37a-3dc6c1ec744c" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2131.782342] env[69784]: DEBUG oslo_concurrency.lockutils [req-1f09b1d5-f1e2-4299-9aed-789b2a37cb62 req-d9e3cdd2-d68e-4b51-8544-67e706daf1c2 service nova] Acquired lock "refresh_cache-86a94763-92c8-4689-b37a-3dc6c1ec744c" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2131.782651] env[69784]: DEBUG nova.network.neutron [req-1f09b1d5-f1e2-4299-9aed-789b2a37cb62 req-d9e3cdd2-d68e-4b51-8544-67e706daf1c2 service nova] [instance: 86a94763-92c8-4689-b37a-3dc6c1ec744c] Refreshing network info cache for port 54fa18a9-8bed-468a-b09a-b627b4808daa {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2132.177733] env[69784]: DEBUG nova.network.neutron [req-1f09b1d5-f1e2-4299-9aed-789b2a37cb62 req-d9e3cdd2-d68e-4b51-8544-67e706daf1c2 service nova] [instance: 86a94763-92c8-4689-b37a-3dc6c1ec744c] Updated VIF entry in instance network info cache for port 54fa18a9-8bed-468a-b09a-b627b4808daa. 
{{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2132.178107] env[69784]: DEBUG nova.network.neutron [req-1f09b1d5-f1e2-4299-9aed-789b2a37cb62 req-d9e3cdd2-d68e-4b51-8544-67e706daf1c2 service nova] [instance: 86a94763-92c8-4689-b37a-3dc6c1ec744c] Updating instance_info_cache with network_info: [{"id": "54fa18a9-8bed-468a-b09a-b627b4808daa", "address": "fa:16:3e:5b:41:71", "network": {"id": "81e149bf-ad1b-4970-8b37-685f69ec18f7", "bridge": "br-int", "label": "tempest-ServersTestJSON-918442431-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e91214f37d741d7935308a363541f5c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a92a4ffe-7939-4697-bf98-5b22e2c7feda", "external-id": "nsx-vlan-transportzone-732", "segmentation_id": 732, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54fa18a9-8b", "ovs_interfaceid": "54fa18a9-8bed-468a-b09a-b627b4808daa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2132.187522] env[69784]: DEBUG oslo_concurrency.lockutils [req-1f09b1d5-f1e2-4299-9aed-789b2a37cb62 req-d9e3cdd2-d68e-4b51-8544-67e706daf1c2 service nova] Releasing lock "refresh_cache-86a94763-92c8-4689-b37a-3dc6c1ec744c" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2132.839330] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2133.840138] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2134.839765] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2134.850983] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2134.851248] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2134.851383] env[69784]: DEBUG 
oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2134.851535] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69784) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2134.852702] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e5fdeb-3b05-4b55-bf34-b773ae0477bb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.861295] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee75cb98-7513-4067-b5b0-6f4ed7ef897c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.874740] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ec64ec8-7b54-4d08-8dce-d85b214b70e3 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.880735] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bc9c9d6-cb58-4048-b176-dfcac2ca95aa {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.909726] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180944MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=69784) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2134.909866] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2134.910087] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2135.019222] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2135.019395] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 7632e563-1790-442f-9e13-77f3d93e4223 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2135.019526] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 871e3b73-d4d8-4081-8b92-0dee212d8961 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2135.019651] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ae4e1119-10e5-42fe-bb57-6bcb2c54d90b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2135.019770] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 4479a824-1f93-45d0-953f-57736580d86f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2135.019888] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ec925fe0-8f7b-46c0-8d61-6a9cf989e798 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2135.020009] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c78438d5-ddaa-4858-a161-af83e6c16e54 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2135.020140] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 0a177681-5f4e-4dc5-baee-1303be38444a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2135.020257] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance e35ffffb-c0fd-4236-b489-80eb0fdb4e37 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2135.020371] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 86a94763-92c8-4689-b37a-3dc6c1ec744c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2135.020586] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2135.020728] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2135.036200] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Refreshing inventories for resource provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2135.049846] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Updating ProviderTree inventory for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 2135.050030] env[69784]: DEBUG nova.compute.provider_tree [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Updating inventory in ProviderTree for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2135.059777] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Refreshing aggregate associations for resource provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3, aggregates: None {{(pid=69784) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 2135.077044] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Refreshing trait associations for resource provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69784) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 2135.184872] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbee5b61-e96a-4d26-b54a-39bd7ab9a233 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.192375] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4204f90d-bf06-4707-885a-e4ad4f479ffa {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.221247] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fd8cab6-ab66-4ca7-90e4-fe67d3ac1a23 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.228234] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2cc38b6-40ad-41ef-a5c6-8a94935fb82e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.241438] env[69784]: DEBUG nova.compute.provider_tree [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2135.251023] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2135.266469] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69784) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2135.266666] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.357s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2138.262080] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2138.262387] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2138.262543] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Starting heal instance info cache {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2138.262690] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Rebuilding the list of instances to heal {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2138.282171] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 
ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2138.282290] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2138.282413] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2138.282549] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2138.282686] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2138.282813] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2138.282934] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2138.283065] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2138.283188] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2138.283304] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 86a94763-92c8-4689-b37a-3dc6c1ec744c] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2138.283423] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Didn't find any instances for network info cache update. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2138.283863] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2140.840381] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2140.840805] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Cleaning up deleted instances with incomplete migration {{(pid=69784) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11236}} [ 2140.878363] env[69784]: WARNING oslo_vmware.rw_handles [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2140.878363] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2140.878363] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2140.878363] env[69784]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2140.878363] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2140.878363] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 2140.878363] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2140.878363] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2140.878363] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2140.878363] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2140.878363] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2140.878363] env[69784]: ERROR oslo_vmware.rw_handles [ 2140.878869] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/8eefb929-6aeb-4e31-a32e-0500cbe09f2b/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2140.880909] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2140.881177] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Copying 
Virtual Disk [datastore1] vmware_temp/8eefb929-6aeb-4e31-a32e-0500cbe09f2b/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] vmware_temp/8eefb929-6aeb-4e31-a32e-0500cbe09f2b/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2140.881643] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-af806a44-69a6-417a-b467-3a44f51f821b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.889655] env[69784]: DEBUG oslo_vmware.api [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Waiting for the task: (returnval){ [ 2140.889655] env[69784]: value = "task-3467217" [ 2140.889655] env[69784]: _type = "Task" [ 2140.889655] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2140.897249] env[69784]: DEBUG oslo_vmware.api [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Task: {'id': task-3467217, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2141.399475] env[69784]: DEBUG oslo_vmware.exceptions [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Fault InvalidArgument not matched. {{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2141.399772] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2141.400381] env[69784]: ERROR nova.compute.manager [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2141.400381] env[69784]: Faults: ['InvalidArgument'] [ 2141.400381] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Traceback (most recent call last): [ 2141.400381] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2141.400381] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] yield resources [ 2141.400381] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2141.400381] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] self.driver.spawn(context, instance, image_meta, [ 2141.400381] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] File 
"/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2141.400381] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2141.400381] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2141.400381] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] self._fetch_image_if_missing(context, vi) [ 2141.400381] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2141.400720] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] image_cache(vi, tmp_image_ds_loc) [ 2141.400720] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2141.400720] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] vm_util.copy_virtual_disk( [ 2141.400720] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2141.400720] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] session._wait_for_task(vmdk_copy_task) [ 2141.400720] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2141.400720] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] return self.wait_for_task(task_ref) [ 2141.400720] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2141.400720] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] return evt.wait() [ 2141.400720] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2141.400720] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] result = hub.switch() [ 2141.400720] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2141.400720] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] return self.greenlet.switch() [ 2141.401087] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2141.401087] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] self.f(*self.args, **self.kw) [ 2141.401087] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2141.401087] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] raise exceptions.translate_fault(task_info.error) [ 
2141.401087] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2141.401087] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Faults: ['InvalidArgument'] [ 2141.401087] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] [ 2141.401087] env[69784]: INFO nova.compute.manager [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Terminating instance [ 2141.402341] env[69784]: DEBUG oslo_concurrency.lockutils [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2141.402434] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2141.402669] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b83ba468-097c-430a-a4b2-15a19619aee2 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.404859] env[69784]: DEBUG nova.compute.manager [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Start destroying the instance on the hypervisor. 
{{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2141.405094] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2141.405806] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2db1f676-960d-44be-8392-a82c0997a328 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.412494] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2141.412718] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-034e54d3-89d4-45b5-ac2a-6feeccd30e24 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.414713] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2141.414890] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2141.415832] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f42f8523-a6ac-4137-90bc-1e18301a7463 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.420265] env[69784]: DEBUG oslo_vmware.api [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Waiting for the task: (returnval){ [ 2141.420265] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]529a291a-d672-4892-2895-3eb578ecb4c0" [ 2141.420265] env[69784]: _type = "Task" [ 2141.420265] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2141.433957] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2141.434203] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Creating directory with path [datastore1] vmware_temp/f060021f-1980-4e72-9c9b-df0838b5ac00/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2141.434418] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1c38d59c-a4d1-4c98-ab4f-46137d160bd6 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.454714] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Created directory with path [datastore1] vmware_temp/f060021f-1980-4e72-9c9b-df0838b5ac00/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2141.454952] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Fetch image to [datastore1] vmware_temp/f060021f-1980-4e72-9c9b-df0838b5ac00/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2141.455103] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/f060021f-1980-4e72-9c9b-df0838b5ac00/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2141.455860] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b33d8a1-96d5-4a35-bb34-4800a8a8178c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.462637] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9def0400-8ec8-4001-8f8e-1a18775b78ed {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.471643] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edfff803-8dfb-4d9c-9960-96529dac5a41 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.504027] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b07ef5e7-e5cd-4354-8603-e5346eb9f548 
{{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.507119] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2141.507119] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2141.507197] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Deleting the datastore file [datastore1] ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07 {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2141.507424] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-444c2410-6f4a-4ab3-8eff-ce70c93be48f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.512172] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6ea07aec-c9e1-456e-9934-c32d37f55c64 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.515026] env[69784]: DEBUG oslo_vmware.api [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Waiting for the task: (returnval){ [ 2141.515026] env[69784]: value = "task-3467219" [ 2141.515026] env[69784]: _type = "Task" [ 2141.515026] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2141.522246] env[69784]: DEBUG oslo_vmware.api [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Task: {'id': task-3467219, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2141.532035] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2141.580552] env[69784]: DEBUG oslo_vmware.rw_handles [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f060021f-1980-4e72-9c9b-df0838b5ac00/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2141.639512] env[69784]: DEBUG oslo_vmware.rw_handles [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Completed reading data from the image iterator. {{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2141.639720] env[69784]: DEBUG oslo_vmware.rw_handles [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f060021f-1980-4e72-9c9b-df0838b5ac00/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2141.839653] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2142.025361] env[69784]: DEBUG oslo_vmware.api [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Task: {'id': task-3467219, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074308} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2142.025752] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2142.025752] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2142.025928] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2142.026114] env[69784]: INFO nova.compute.manager [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 2142.028044] env[69784]: DEBUG nova.compute.claims [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2142.028217] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2142.028427] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2142.189190] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab5b7beb-7abb-4798-a9c5-385aef79ab86 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.195864] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82bcdd4c-a71c-4a5d-8f1a-df4b262d35e8 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.224359] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ec1ab76-c746-4d1d-82be-c410783183ea {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.231377] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee7125f5-d853-4310-8d78-9db799d56f07 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.244069] env[69784]: DEBUG nova.compute.provider_tree [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2142.251991] env[69784]: DEBUG nova.scheduler.client.report [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2142.265788] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 
tempest-AttachVolumeTestJSON-1532019486-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.237s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2142.266321] env[69784]: ERROR nova.compute.manager [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2142.266321] env[69784]: Faults: ['InvalidArgument'] [ 2142.266321] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Traceback (most recent call last): [ 2142.266321] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2142.266321] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] self.driver.spawn(context, instance, image_meta, [ 2142.266321] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2142.266321] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2142.266321] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2142.266321] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] self._fetch_image_if_missing(context, vi) [ 2142.266321] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2142.266321] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] image_cache(vi, tmp_image_ds_loc) [ 2142.266321] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2142.266713] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] vm_util.copy_virtual_disk( [ 2142.266713] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2142.266713] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] session._wait_for_task(vmdk_copy_task) [ 2142.266713] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2142.266713] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] return self.wait_for_task(task_ref) [ 2142.266713] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2142.266713] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] return evt.wait() [ 2142.266713] env[69784]: ERROR nova.compute.manager [instance: 
ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2142.266713] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] result = hub.switch() [ 2142.266713] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2142.266713] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] return self.greenlet.switch() [ 2142.266713] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2142.266713] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] self.f(*self.args, **self.kw) [ 2142.266984] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2142.266984] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] raise exceptions.translate_fault(task_info.error) [ 2142.266984] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2142.266984] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Faults: ['InvalidArgument'] [ 2142.266984] env[69784]: ERROR nova.compute.manager [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] [ 2142.267109] env[69784]: DEBUG nova.compute.utils [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2142.268376] env[69784]: DEBUG nova.compute.manager [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Build of instance ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07 was re-scheduled: A specified parameter was not correct: fileType [ 2142.268376] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2142.268749] env[69784]: DEBUG nova.compute.manager [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2142.268920] env[69784]: DEBUG nova.compute.manager [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2142.269105] env[69784]: DEBUG nova.compute.manager [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2142.269268] env[69784]: DEBUG nova.network.neutron [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2142.684319] env[69784]: DEBUG nova.network.neutron [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2142.697455] env[69784]: INFO nova.compute.manager [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Took 0.43 seconds to deallocate network for instance. [ 2142.790041] env[69784]: INFO nova.scheduler.client.report [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Deleted allocations for instance ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07 [ 2142.810089] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b35de6b3-77fa-41cd-9ae8-c142d6b4d6d2 tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Lock "ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 616.830s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2142.810376] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4a8dff9e-57c7-44e1-9aa3-79f65c61d92a tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Lock "ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 421.136s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2142.810598] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4a8dff9e-57c7-44e1-9aa3-79f65c61d92a tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Acquiring lock "ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2142.810803] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4a8dff9e-57c7-44e1-9aa3-79f65c61d92a tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Lock "ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69784) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2142.810969] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4a8dff9e-57c7-44e1-9aa3-79f65c61d92a tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Lock "ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2142.813357] env[69784]: INFO nova.compute.manager [None req-4a8dff9e-57c7-44e1-9aa3-79f65c61d92a tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Terminating instance [ 2142.815334] env[69784]: DEBUG nova.compute.manager [None req-4a8dff9e-57c7-44e1-9aa3-79f65c61d92a tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2142.815527] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-4a8dff9e-57c7-44e1-9aa3-79f65c61d92a tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2142.815785] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-945cdf3a-a78b-43af-a9c4-a2dfcda65d05 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.825389] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed75bd06-c948-440a-ac4e-368377ebff4d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.855328] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-4a8dff9e-57c7-44e1-9aa3-79f65c61d92a tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07 could not be found. [ 2142.855515] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-4a8dff9e-57c7-44e1-9aa3-79f65c61d92a tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2142.855696] env[69784]: INFO nova.compute.manager [None req-4a8dff9e-57c7-44e1-9aa3-79f65c61d92a tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2142.855942] env[69784]: DEBUG oslo.service.loopingcall [None req-4a8dff9e-57c7-44e1-9aa3-79f65c61d92a tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2142.856184] env[69784]: DEBUG nova.compute.manager [-] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2142.856283] env[69784]: DEBUG nova.network.neutron [-] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2142.878037] env[69784]: DEBUG nova.network.neutron [-] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2142.885988] env[69784]: INFO nova.compute.manager [-] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] Took 0.03 seconds to deallocate network for instance. [ 2142.973577] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4a8dff9e-57c7-44e1-9aa3-79f65c61d92a tempest-AttachVolumeTestJSON-1532019486 tempest-AttachVolumeTestJSON-1532019486-project-member] Lock "ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.163s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2142.974335] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 265.084s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2142.974625] env[69784]: INFO nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 2142.974823] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "ef9b5e6c-14f3-4bf0-83e0-4a0cc6d96f07" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2145.186586] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquiring lock "e1969fb1-0f70-42c9-a362-f1efb6ee4619" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2145.186924] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "e1969fb1-0f70-42c9-a362-f1efb6ee4619" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2145.196713] env[69784]: DEBUG nova.compute.manager [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: e1969fb1-0f70-42c9-a362-f1efb6ee4619] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2145.242379] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2145.242622] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2145.244096] env[69784]: INFO nova.compute.claims [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: e1969fb1-0f70-42c9-a362-f1efb6ee4619] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2145.418044] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0333e43c-28c9-4744-ba65-c8321b28ffe1 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.425431] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e5e7d76-729e-4cd5-8437-6917c8c9af96 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.454509] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba73d87b-2dc6-4d7d-bfbc-ae58f082d517 
{{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.461386] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f42505b0-3cb4-4bef-9242-4f98a3447020 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.473948] env[69784]: DEBUG nova.compute.provider_tree [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2145.482418] env[69784]: DEBUG nova.scheduler.client.report [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2145.496795] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.254s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2145.497263] env[69784]: DEBUG nova.compute.manager [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: e1969fb1-0f70-42c9-a362-f1efb6ee4619] Start building networks asynchronously for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2145.528175] env[69784]: DEBUG nova.compute.utils [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2145.529561] env[69784]: DEBUG nova.compute.manager [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: e1969fb1-0f70-42c9-a362-f1efb6ee4619] Allocating IP information in the background. 
{{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 2145.529731] env[69784]: DEBUG nova.network.neutron [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: e1969fb1-0f70-42c9-a362-f1efb6ee4619] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2145.539205] env[69784]: DEBUG nova.compute.manager [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: e1969fb1-0f70-42c9-a362-f1efb6ee4619] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2145.584549] env[69784]: DEBUG nova.policy [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c93b274686c34049be1b37ef70656616', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c0126dda00a44838ac749dee6f266970', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 2145.601042] env[69784]: DEBUG nova.compute.manager [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: e1969fb1-0f70-42c9-a362-f1efb6ee4619] Start spawning the instance on the hypervisor. 
{{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2145.620631] env[69784]: DEBUG nova.virt.hardware [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2145.620858] env[69784]: DEBUG nova.virt.hardware [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2145.621018] env[69784]: DEBUG nova.virt.hardware [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2145.621197] env[69784]: DEBUG nova.virt.hardware [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2145.621339] env[69784]: DEBUG nova.virt.hardware [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2145.621480] env[69784]: DEBUG nova.virt.hardware [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2145.621681] env[69784]: DEBUG nova.virt.hardware [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2145.621839] env[69784]: DEBUG nova.virt.hardware [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2145.622009] env[69784]: DEBUG nova.virt.hardware [None 
req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2145.622177] env[69784]: DEBUG nova.virt.hardware [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2145.622348] env[69784]: DEBUG nova.virt.hardware [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2145.623459] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee538dd3-69a7-4635-9cda-2bf69a9da5a4 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.631034] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfebcd68-a7d2-4e11-81cc-77b9356ebdba {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.928118] env[69784]: DEBUG nova.network.neutron [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: e1969fb1-0f70-42c9-a362-f1efb6ee4619] Successfully created port: e925b30b-c1fc-4803-9254-ead1bd249612 {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2146.670408] env[69784]: DEBUG nova.compute.manager [req-163a28a7-aa1f-4d02-984b-cc44cc141af1 req-d637b0a3-7223-4abc-8e5c-3f1119786000 service nova] [instance: e1969fb1-0f70-42c9-a362-f1efb6ee4619] Received event network-vif-plugged-e925b30b-c1fc-4803-9254-ead1bd249612 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2146.670683] env[69784]: DEBUG oslo_concurrency.lockutils [req-163a28a7-aa1f-4d02-984b-cc44cc141af1 req-d637b0a3-7223-4abc-8e5c-3f1119786000 service nova] Acquiring lock "e1969fb1-0f70-42c9-a362-f1efb6ee4619-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2146.671155] env[69784]: DEBUG oslo_concurrency.lockutils [req-163a28a7-aa1f-4d02-984b-cc44cc141af1 req-d637b0a3-7223-4abc-8e5c-3f1119786000 service nova] Lock "e1969fb1-0f70-42c9-a362-f1efb6ee4619-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2146.671155] env[69784]: DEBUG oslo_concurrency.lockutils [req-163a28a7-aa1f-4d02-984b-cc44cc141af1 req-d637b0a3-7223-4abc-8e5c-3f1119786000 service nova] Lock "e1969fb1-0f70-42c9-a362-f1efb6ee4619-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2146.671404] env[69784]: DEBUG nova.compute.manager 
[req-163a28a7-aa1f-4d02-984b-cc44cc141af1 req-d637b0a3-7223-4abc-8e5c-3f1119786000 service nova] [instance: e1969fb1-0f70-42c9-a362-f1efb6ee4619] No waiting events found dispatching network-vif-plugged-e925b30b-c1fc-4803-9254-ead1bd249612 {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2146.671610] env[69784]: WARNING nova.compute.manager [req-163a28a7-aa1f-4d02-984b-cc44cc141af1 req-d637b0a3-7223-4abc-8e5c-3f1119786000 service nova] [instance: e1969fb1-0f70-42c9-a362-f1efb6ee4619] Received unexpected event network-vif-plugged-e925b30b-c1fc-4803-9254-ead1bd249612 for instance with vm_state building and task_state spawning. [ 2146.750930] env[69784]: DEBUG nova.network.neutron [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: e1969fb1-0f70-42c9-a362-f1efb6ee4619] Successfully updated port: e925b30b-c1fc-4803-9254-ead1bd249612 {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2146.761976] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquiring lock "refresh_cache-e1969fb1-0f70-42c9-a362-f1efb6ee4619" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2146.762244] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquired lock "refresh_cache-e1969fb1-0f70-42c9-a362-f1efb6ee4619" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2146.762244] env[69784]: DEBUG nova.network.neutron [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: e1969fb1-0f70-42c9-a362-f1efb6ee4619] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2146.800508] env[69784]: DEBUG nova.network.neutron [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: e1969fb1-0f70-42c9-a362-f1efb6ee4619] Instance cache missing network info. 
{{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2147.020116] env[69784]: DEBUG nova.network.neutron [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: e1969fb1-0f70-42c9-a362-f1efb6ee4619] Updating instance_info_cache with network_info: [{"id": "e925b30b-c1fc-4803-9254-ead1bd249612", "address": "fa:16:3e:05:1d:bb", "network": {"id": "089b8552-21a7-446a-93e8-08dfd5616726", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-692750947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0126dda00a44838ac749dee6f266970", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "75ff81f9-72b2-4e58-a8d8-5699907f7459", "external-id": "nsx-vlan-transportzone-978", "segmentation_id": 978, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape925b30b-c1", "ovs_interfaceid": "e925b30b-c1fc-4803-9254-ead1bd249612", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2147.030860] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Releasing lock "refresh_cache-e1969fb1-0f70-42c9-a362-f1efb6ee4619" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2147.031621] env[69784]: DEBUG nova.compute.manager [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: e1969fb1-0f70-42c9-a362-f1efb6ee4619] Instance network_info: |[{"id": "e925b30b-c1fc-4803-9254-ead1bd249612", "address": "fa:16:3e:05:1d:bb", "network": {"id": "089b8552-21a7-446a-93e8-08dfd5616726", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-692750947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0126dda00a44838ac749dee6f266970", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "75ff81f9-72b2-4e58-a8d8-5699907f7459", "external-id": "nsx-vlan-transportzone-978", "segmentation_id": 978, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape925b30b-c1", "ovs_interfaceid": "e925b30b-c1fc-4803-9254-ead1bd249612", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1971}} [ 2147.031753] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: e1969fb1-0f70-42c9-a362-f1efb6ee4619] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:05:1d:bb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '75ff81f9-72b2-4e58-a8d8-5699907f7459', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e925b30b-c1fc-4803-9254-ead1bd249612', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2147.038926] env[69784]: DEBUG oslo.service.loopingcall [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2147.039422] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1969fb1-0f70-42c9-a362-f1efb6ee4619] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2147.039653] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bc5ffbe6-1717-4b87-8806-fdf3eb413cd1 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.060527] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2147.060527] env[69784]: value = "task-3467220" [ 2147.060527] env[69784]: _type = "Task" [ 2147.060527] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2147.068479] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467220, 'name': CreateVM_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2147.570960] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467220, 'name': CreateVM_Task, 'duration_secs': 0.302533} completed successfully. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2147.571142] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1969fb1-0f70-42c9-a362-f1efb6ee4619] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2147.571782] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2147.571943] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2147.572285] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2147.572520] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ff1c0a0-823a-4c5c-a5b1-ec1758183080 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.576519] env[69784]: DEBUG oslo_vmware.api [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Waiting for the task: (returnval){ [ 2147.576519] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52951a1c-2ca8-8e6b-c060-ab5c6330474a" [ 2147.576519] env[69784]: _type = "Task" [ 2147.576519] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2147.585802] env[69784]: DEBUG oslo_vmware.api [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52951a1c-2ca8-8e6b-c060-ab5c6330474a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2147.847483] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2147.847861] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Cleaning up deleted instances {{(pid=69784) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11198}} [ 2147.858363] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] There are 0 instances to clean {{(pid=69784) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11207}} [ 2148.086535] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2148.086788] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: e1969fb1-0f70-42c9-a362-f1efb6ee4619] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2148.087008] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8afe7e4d-d6dc-4f1c-87fe-d615dacb7a8f tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2148.697020] env[69784]: DEBUG nova.compute.manager [req-abe9c7f1-0b19-4c7a-8baa-6d0f42392fa6 req-1f33cbaa-5a43-41c7-a4e5-538b4c8fba67 service nova] [instance: e1969fb1-0f70-42c9-a362-f1efb6ee4619] Received event network-changed-e925b30b-c1fc-4803-9254-ead1bd249612 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2148.697020] env[69784]: DEBUG nova.compute.manager [req-abe9c7f1-0b19-4c7a-8baa-6d0f42392fa6 req-1f33cbaa-5a43-41c7-a4e5-538b4c8fba67 service nova] [instance: e1969fb1-0f70-42c9-a362-f1efb6ee4619] Refreshing instance network info cache due to event network-changed-e925b30b-c1fc-4803-9254-ead1bd249612. 
{{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2148.697020] env[69784]: DEBUG oslo_concurrency.lockutils [req-abe9c7f1-0b19-4c7a-8baa-6d0f42392fa6 req-1f33cbaa-5a43-41c7-a4e5-538b4c8fba67 service nova] Acquiring lock "refresh_cache-e1969fb1-0f70-42c9-a362-f1efb6ee4619" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2148.697224] env[69784]: DEBUG oslo_concurrency.lockutils [req-abe9c7f1-0b19-4c7a-8baa-6d0f42392fa6 req-1f33cbaa-5a43-41c7-a4e5-538b4c8fba67 service nova] Acquired lock "refresh_cache-e1969fb1-0f70-42c9-a362-f1efb6ee4619" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2148.697274] env[69784]: DEBUG nova.network.neutron [req-abe9c7f1-0b19-4c7a-8baa-6d0f42392fa6 req-1f33cbaa-5a43-41c7-a4e5-538b4c8fba67 service nova] [instance: e1969fb1-0f70-42c9-a362-f1efb6ee4619] Refreshing network info cache for port e925b30b-c1fc-4803-9254-ead1bd249612 {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2148.934354] env[69784]: DEBUG nova.network.neutron [req-abe9c7f1-0b19-4c7a-8baa-6d0f42392fa6 req-1f33cbaa-5a43-41c7-a4e5-538b4c8fba67 service nova] [instance: e1969fb1-0f70-42c9-a362-f1efb6ee4619] Updated VIF entry in instance network info cache for port e925b30b-c1fc-4803-9254-ead1bd249612. {{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2148.934720] env[69784]: DEBUG nova.network.neutron [req-abe9c7f1-0b19-4c7a-8baa-6d0f42392fa6 req-1f33cbaa-5a43-41c7-a4e5-538b4c8fba67 service nova] [instance: e1969fb1-0f70-42c9-a362-f1efb6ee4619] Updating instance_info_cache with network_info: [{"id": "e925b30b-c1fc-4803-9254-ead1bd249612", "address": "fa:16:3e:05:1d:bb", "network": {"id": "089b8552-21a7-446a-93e8-08dfd5616726", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-692750947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0126dda00a44838ac749dee6f266970", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "75ff81f9-72b2-4e58-a8d8-5699907f7459", "external-id": "nsx-vlan-transportzone-978", "segmentation_id": 978, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape925b30b-c1", "ovs_interfaceid": "e925b30b-c1fc-4803-9254-ead1bd249612", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2148.944280] env[69784]: DEBUG oslo_concurrency.lockutils [req-abe9c7f1-0b19-4c7a-8baa-6d0f42392fa6 req-1f33cbaa-5a43-41c7-a4e5-538b4c8fba67 service nova] Releasing lock "refresh_cache-e1969fb1-0f70-42c9-a362-f1efb6ee4619" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2188.851069] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69784) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2188.851467] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69784) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2189.841030] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2191.053183] env[69784]: WARNING oslo_vmware.rw_handles [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2191.053183] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2191.053183] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2191.053183] env[69784]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2191.053183] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2191.053183] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 2191.053183] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2191.053183] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2191.053183] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2191.053183] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2191.053183] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2191.053183] env[69784]: ERROR oslo_vmware.rw_handles [ 2191.053849] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/f060021f-1980-4e72-9c9b-df0838b5ac00/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2191.056251] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2191.056500] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Copying Virtual Disk [datastore1] vmware_temp/f060021f-1980-4e72-9c9b-df0838b5ac00/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] vmware_temp/f060021f-1980-4e72-9c9b-df0838b5ac00/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2191.056878] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7898a90f-cfc1-4454-891f-fdc433b0b5bb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.065560] env[69784]: DEBUG oslo_vmware.api [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Waiting for the task: (returnval){ [ 2191.065560] env[69784]: value = "task-3467221" [ 2191.065560] env[69784]: _type = "Task" [ 2191.065560] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2191.073797] env[69784]: DEBUG oslo_vmware.api [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Task: {'id': task-3467221, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2191.575895] env[69784]: DEBUG oslo_vmware.exceptions [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Fault InvalidArgument not matched. {{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2191.576195] env[69784]: DEBUG oslo_concurrency.lockutils [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2191.576765] env[69784]: ERROR nova.compute.manager [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2191.576765] env[69784]: Faults: ['InvalidArgument'] [ 2191.576765] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Traceback (most recent call last): [ 2191.576765] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2191.576765] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] yield resources [ 2191.576765] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2191.576765] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] self.driver.spawn(context, instance, image_meta, [ 2191.576765] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2191.576765] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2191.576765] env[69784]: ERROR nova.compute.manager 
[instance: 7632e563-1790-442f-9e13-77f3d93e4223] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2191.576765] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] self._fetch_image_if_missing(context, vi) [ 2191.576765] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2191.577137] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] image_cache(vi, tmp_image_ds_loc) [ 2191.577137] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2191.577137] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] vm_util.copy_virtual_disk( [ 2191.577137] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2191.577137] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] session._wait_for_task(vmdk_copy_task) [ 2191.577137] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2191.577137] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] return self.wait_for_task(task_ref) [ 2191.577137] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2191.577137] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] return evt.wait() [ 2191.577137] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2191.577137] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] result = hub.switch() [ 2191.577137] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2191.577137] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] return self.greenlet.switch() [ 2191.577563] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2191.577563] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] self.f(*self.args, **self.kw) [ 2191.577563] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2191.577563] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] raise exceptions.translate_fault(task_info.error) [ 2191.577563] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2191.577563] env[69784]: ERROR nova.compute.manager [instance: 
7632e563-1790-442f-9e13-77f3d93e4223] Faults: ['InvalidArgument'] [ 2191.577563] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] [ 2191.577563] env[69784]: INFO nova.compute.manager [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Terminating instance [ 2191.578769] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2191.578933] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2191.579100] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-feee0522-19b0-4522-826f-0ff1e4f37247 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.581443] env[69784]: DEBUG nova.compute.manager [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Start destroying the instance on the hypervisor. 
{{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2191.581635] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2191.582371] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e962cd0f-b097-4a2b-afbd-014f1662a658 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.588907] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2191.589118] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-83cb35b0-1507-43d4-8b97-f71294498b01 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.591161] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2191.591349] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2191.592253] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03d66b0b-291b-412f-98f8-4a803d25afde {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.596915] env[69784]: DEBUG oslo_vmware.api [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Waiting for the task: (returnval){ [ 2191.596915] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]527f24cb-5b61-8a2c-0101-ee8b45e9ac8d" [ 2191.596915] env[69784]: _type = "Task" [ 2191.596915] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2191.609875] env[69784]: DEBUG oslo_vmware.api [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]527f24cb-5b61-8a2c-0101-ee8b45e9ac8d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2191.657184] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2191.657399] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2191.657580] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Deleting the datastore file [datastore1] 7632e563-1790-442f-9e13-77f3d93e4223 {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2191.657838] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-89f7720d-0184-4f31-920a-b4527f37846e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.664507] env[69784]: DEBUG oslo_vmware.api [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Waiting for the task: (returnval){ [ 2191.664507] env[69784]: value = "task-3467223" [ 2191.664507] env[69784]: _type = "Task" [ 2191.664507] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2191.671952] env[69784]: DEBUG oslo_vmware.api [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Task: {'id': task-3467223, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2191.839707] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2192.107018] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2192.107355] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Creating directory with path [datastore1] vmware_temp/42c91bc1-2949-4d75-9d6c-2a7e5bca60de/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2192.107516] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5a4e64b3-80c9-4aa1-8a68-191188cc0535 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.118269] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Created directory with path [datastore1] vmware_temp/42c91bc1-2949-4d75-9d6c-2a7e5bca60de/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2192.118455] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Fetch image to [datastore1] vmware_temp/42c91bc1-2949-4d75-9d6c-2a7e5bca60de/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2192.118619] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/42c91bc1-2949-4d75-9d6c-2a7e5bca60de/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2192.119337] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3261194e-64f7-45ee-86ea-78eaa6d1e07b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.125627] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33fb6acd-32ae-45da-8303-f49da86fe005 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.134182] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a56cd3af-4734-4c46-b698-3145ef68a66a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.165048] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b34eadb1-4d84-4784-8084-f496de566a9e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.175502] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-085ae815-0dfd-4af9-822d-f5110529ddd3 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.177106] env[69784]: DEBUG oslo_vmware.api [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Task: {'id': task-3467223, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069785} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2192.177335] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2192.177508] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2192.177673] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2192.177843] env[69784]: INFO nova.compute.manager [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2192.179895] env[69784]: DEBUG nova.compute.claims [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2192.180104] env[69784]: DEBUG oslo_concurrency.lockutils [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2192.180328] env[69784]: DEBUG oslo_concurrency.lockutils [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2192.197740] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2192.247326] env[69784]: DEBUG oslo_vmware.rw_handles [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/42c91bc1-2949-4d75-9d6c-2a7e5bca60de/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2192.307011] env[69784]: DEBUG oslo_vmware.rw_handles [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Completed reading data from the image iterator. {{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2192.307219] env[69784]: DEBUG oslo_vmware.rw_handles [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/42c91bc1-2949-4d75-9d6c-2a7e5bca60de/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2192.405972] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57094f3b-cca5-4f1a-a55c-feb1ae7f742d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.413465] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76900b68-ce92-4ea0-aaa1-03021354958f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.442703] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c91e9a4-a340-4498-8096-d805ac01b7e5 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.449497] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e79137f3-7d36-4c3c-b4cf-b57937a31df0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.462990] env[69784]: DEBUG nova.compute.provider_tree [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2192.471704] env[69784]: DEBUG nova.scheduler.client.report [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2192.485529] env[69784]: DEBUG oslo_concurrency.lockutils [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.305s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2192.486093] env[69784]: ERROR nova.compute.manager [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2192.486093] env[69784]: Faults: ['InvalidArgument'] [ 2192.486093] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Traceback (most recent call last): [ 2192.486093] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2192.486093] env[69784]: ERROR 
nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] self.driver.spawn(context, instance, image_meta, [ 2192.486093] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2192.486093] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2192.486093] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2192.486093] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] self._fetch_image_if_missing(context, vi) [ 2192.486093] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2192.486093] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] image_cache(vi, tmp_image_ds_loc) [ 2192.486093] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2192.486442] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] vm_util.copy_virtual_disk( [ 2192.486442] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2192.486442] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] session._wait_for_task(vmdk_copy_task) [ 2192.486442] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2192.486442] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] return self.wait_for_task(task_ref) [ 2192.486442] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2192.486442] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] return evt.wait() [ 2192.486442] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2192.486442] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] result = hub.switch() [ 2192.486442] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2192.486442] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] return self.greenlet.switch() [ 2192.486442] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2192.486442] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] self.f(*self.args, **self.kw) [ 2192.486804] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2192.486804] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] raise exceptions.translate_fault(task_info.error) [ 2192.486804] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2192.486804] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Faults: ['InvalidArgument'] [ 2192.486804] env[69784]: ERROR nova.compute.manager [instance: 7632e563-1790-442f-9e13-77f3d93e4223] [ 2192.486804] env[69784]: DEBUG nova.compute.utils [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2192.488113] env[69784]: DEBUG nova.compute.manager [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Build of instance 7632e563-1790-442f-9e13-77f3d93e4223 was re-scheduled: A specified parameter was not correct: fileType [ 2192.488113] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2192.488475] env[69784]: DEBUG nova.compute.manager [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2192.488645] env[69784]: DEBUG nova.compute.manager [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2192.488813] env[69784]: DEBUG nova.compute.manager [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2192.488975] env[69784]: DEBUG nova.network.neutron [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2192.800179] env[69784]: DEBUG nova.network.neutron [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2192.812125] env[69784]: INFO nova.compute.manager [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Took 0.32 seconds to deallocate network for instance. [ 2192.910694] env[69784]: INFO nova.scheduler.client.report [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Deleted allocations for instance 7632e563-1790-442f-9e13-77f3d93e4223 [ 2192.930774] env[69784]: DEBUG oslo_concurrency.lockutils [None req-660e0c31-39f9-4ac7-b7d6-fbe4ff9e113a tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Lock "7632e563-1790-442f-9e13-77f3d93e4223" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 576.783s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2192.931064] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cdaa0d3c-3fba-4d5d-a4d8-5c645612f9d1 tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Lock "7632e563-1790-442f-9e13-77f3d93e4223" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 381.639s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2192.931295] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cdaa0d3c-3fba-4d5d-a4d8-5c645612f9d1 tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Acquiring lock "7632e563-1790-442f-9e13-77f3d93e4223-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2192.931506] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cdaa0d3c-3fba-4d5d-a4d8-5c645612f9d1 tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Lock "7632e563-1790-442f-9e13-77f3d93e4223-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2192.931701] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cdaa0d3c-3fba-4d5d-a4d8-5c645612f9d1 tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Lock "7632e563-1790-442f-9e13-77f3d93e4223-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2192.934054] env[69784]: INFO nova.compute.manager [None req-cdaa0d3c-3fba-4d5d-a4d8-5c645612f9d1 tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Terminating instance [ 2192.935820] env[69784]: DEBUG nova.compute.manager [None req-cdaa0d3c-3fba-4d5d-a4d8-5c645612f9d1 tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2192.936051] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-cdaa0d3c-3fba-4d5d-a4d8-5c645612f9d1 tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2192.936571] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f5c7c0a6-f42a-4c25-8474-2fb83a4e8e0e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.946370] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e693fb99-b71b-4e3f-9b9d-9fc94ea6f5c6 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.975699] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-cdaa0d3c-3fba-4d5d-a4d8-5c645612f9d1 tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7632e563-1790-442f-9e13-77f3d93e4223 could not be found. [ 2192.975908] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-cdaa0d3c-3fba-4d5d-a4d8-5c645612f9d1 tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2192.976103] env[69784]: INFO nova.compute.manager [None req-cdaa0d3c-3fba-4d5d-a4d8-5c645612f9d1 tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2192.976349] env[69784]: DEBUG oslo.service.loopingcall [None req-cdaa0d3c-3fba-4d5d-a4d8-5c645612f9d1 tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2192.976564] env[69784]: DEBUG nova.compute.manager [-] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2192.976661] env[69784]: DEBUG nova.network.neutron [-] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2192.997988] env[69784]: DEBUG nova.network.neutron [-] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2193.005261] env[69784]: INFO nova.compute.manager [-] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] Took 0.03 seconds to deallocate network for instance. [ 2193.087827] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cdaa0d3c-3fba-4d5d-a4d8-5c645612f9d1 tempest-ServersNegativeTestJSON-20042301 tempest-ServersNegativeTestJSON-20042301-project-member] Lock "7632e563-1790-442f-9e13-77f3d93e4223" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.157s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2193.088707] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "7632e563-1790-442f-9e13-77f3d93e4223" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 315.198s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2193.088896] env[69784]: INFO nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 7632e563-1790-442f-9e13-77f3d93e4223] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 2193.089095] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "7632e563-1790-442f-9e13-77f3d93e4223" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2193.839448] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2195.840541] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2196.840391] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2196.851141] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2196.851444] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2196.851601] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2196.851755] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69784) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2196.853224] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ded3322d-2e47-47af-b866-d8d2b3627ff4 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2196.861767] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d441ab6-05d6-43f5-9726-f234450fd39d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2196.876805] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-761b3e42-9306-4248-b317-a128019f3a1b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2196.883035] env[69784]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12228dc0-3276-4670-8494-9ac5ad9c0dcb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2196.913044] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180948MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=69784) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2196.913215] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2196.913435] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2196.980026] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 871e3b73-d4d8-4081-8b92-0dee212d8961 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2196.980178] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ae4e1119-10e5-42fe-bb57-6bcb2c54d90b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2196.980306] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 4479a824-1f93-45d0-953f-57736580d86f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2196.980429] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ec925fe0-8f7b-46c0-8d61-6a9cf989e798 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2196.980546] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c78438d5-ddaa-4858-a161-af83e6c16e54 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2196.980661] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 0a177681-5f4e-4dc5-baee-1303be38444a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2196.980778] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance e35ffffb-c0fd-4236-b489-80eb0fdb4e37 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2196.980906] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 86a94763-92c8-4689-b37a-3dc6c1ec744c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2196.981036] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance e1969fb1-0f70-42c9-a362-f1efb6ee4619 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2196.981245] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2196.981397] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2197.089559] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-576ec6a5-df87-4bfc-ae74-0c18ffce2060 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.097520] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9e44a2b-a761-4cc2-a8d8-88bf01707814 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.127727] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d611fcd2-ab74-4c14-ae74-919ede74317e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.134634] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3004f09-3cde-4d54-9208-df3e9995134a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.147309] env[69784]: DEBUG nova.compute.provider_tree 
[None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2197.155837] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2197.170977] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69784) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2197.171168] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.258s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2199.165557] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2199.841102] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2199.841102] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Starting heal instance info cache {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2199.841102] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Rebuilding the list of instances to heal {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2199.859614] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2199.859789] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2199.859928] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Skipping network cache update for instance because it is Building. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2199.860098] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2199.860263] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2199.860407] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2199.860558] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2199.860713] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 86a94763-92c8-4689-b37a-3dc6c1ec744c] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2199.860843] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: e1969fb1-0f70-42c9-a362-f1efb6ee4619] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2199.860991] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Didn't find any instances for network info cache update. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2199.861516] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2201.855733] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2210.268183] env[69784]: DEBUG oslo_concurrency.lockutils [None req-13c27f93-0e28-4d96-bc74-9b648cfd732a tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Acquiring lock "e35ffffb-c0fd-4236-b489-80eb0fdb4e37" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2240.913791] env[69784]: WARNING oslo_vmware.rw_handles [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2240.913791] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2240.913791] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2240.913791] env[69784]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2240.913791] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2240.913791] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 2240.913791] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2240.913791] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2240.913791] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2240.913791] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2240.913791] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2240.913791] env[69784]: ERROR oslo_vmware.rw_handles [ 2240.914440] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/42c91bc1-2949-4d75-9d6c-2a7e5bca60de/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2240.916257] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 
2240.916497] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Copying Virtual Disk [datastore1] vmware_temp/42c91bc1-2949-4d75-9d6c-2a7e5bca60de/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] vmware_temp/42c91bc1-2949-4d75-9d6c-2a7e5bca60de/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2240.916806] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aa36cea2-0942-406e-b1dc-88c87c750e11 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2240.925157] env[69784]: DEBUG oslo_vmware.api [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Waiting for the task: (returnval){ [ 2240.925157] env[69784]: value = "task-3467224" [ 2240.925157] env[69784]: _type = "Task" [ 2240.925157] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2240.933698] env[69784]: DEBUG oslo_vmware.api [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Task: {'id': task-3467224, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2241.435569] env[69784]: DEBUG oslo_vmware.exceptions [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Fault InvalidArgument not matched. 
{{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2241.435833] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2241.436409] env[69784]: ERROR nova.compute.manager [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2241.436409] env[69784]: Faults: ['InvalidArgument'] [ 2241.436409] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Traceback (most recent call last): [ 2241.436409] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2241.436409] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] yield resources [ 2241.436409] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2241.436409] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] self.driver.spawn(context, instance, image_meta, [ 2241.436409] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2241.436409] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2241.436409] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2241.436409] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] self._fetch_image_if_missing(context, vi) [ 2241.436409] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2241.436717] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] image_cache(vi, tmp_image_ds_loc) [ 2241.436717] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2241.436717] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] vm_util.copy_virtual_disk( [ 2241.436717] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2241.436717] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] session._wait_for_task(vmdk_copy_task) [ 2241.436717] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2241.436717] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] return self.wait_for_task(task_ref) [ 2241.436717] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2241.436717] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] return evt.wait() [ 2241.436717] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2241.436717] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] result = hub.switch() [ 2241.436717] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2241.436717] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] return self.greenlet.switch() [ 2241.437065] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2241.437065] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] self.f(*self.args, **self.kw) [ 2241.437065] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2241.437065] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] raise exceptions.translate_fault(task_info.error) [ 2241.437065] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2241.437065] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Faults: ['InvalidArgument'] [ 2241.437065] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] [ 2241.437065] env[69784]: INFO nova.compute.manager [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Terminating instance [ 2241.438275] env[69784]: DEBUG oslo_concurrency.lockutils [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2241.438484] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2241.438711] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-ca587ce5-2678-47b4-9190-91aa2c32e0e3 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.440994] env[69784]: DEBUG nova.compute.manager [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2241.441203] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2241.441888] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36497a52-dc2a-450e-9af0-4b58d0b9c68d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.448641] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2241.448844] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f56592b3-ed85-4e76-9267-b406f3a3bdd8 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.450832] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2241.451011] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2241.451918] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-534bfa77-6727-427d-8a6d-5db6af40f592 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.456965] env[69784]: DEBUG oslo_vmware.api [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Waiting for the task: (returnval){ [ 2241.456965] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52ae083e-766f-1c97-31c5-adbd2c2ad52e" [ 2241.456965] env[69784]: _type = "Task" [ 2241.456965] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2241.463638] env[69784]: DEBUG oslo_vmware.api [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52ae083e-766f-1c97-31c5-adbd2c2ad52e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2241.517011] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2241.517254] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2241.517433] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Deleting the datastore file [datastore1] 871e3b73-d4d8-4081-8b92-0dee212d8961 {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2241.517693] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-45e9f69a-d8e3-455b-84ff-19fa14db6f28 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.524731] env[69784]: DEBUG oslo_vmware.api [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Waiting for the task: (returnval){ [ 2241.524731] env[69784]: value = "task-3467226" [ 2241.524731] env[69784]: _type = "Task" [ 2241.524731] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2241.532784] env[69784]: DEBUG oslo_vmware.api [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Task: {'id': task-3467226, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2241.967641] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2241.967978] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Creating directory with path [datastore1] vmware_temp/0f3a8314-0f85-4f68-9111-ed690d974935/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2241.968144] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3bd19c90-6994-45e6-929d-7e87db2a71a0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.980203] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Created directory with path [datastore1] vmware_temp/0f3a8314-0f85-4f68-9111-ed690d974935/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2241.980302] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Fetch image to [datastore1] vmware_temp/0f3a8314-0f85-4f68-9111-ed690d974935/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2241.980475] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/0f3a8314-0f85-4f68-9111-ed690d974935/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2241.981195] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-647bdbd7-238d-49ee-a990-84ca3740ff2f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.987772] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da6016d8-755f-49e6-90c7-6b35db7ab422 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.996495] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3a186b0-66fd-4985-b6f2-8bb3b1c0c135 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.028625] env[69784]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c56912e8-2dcc-431d-b19e-fe173f9113dc {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.036187] env[69784]: DEBUG oslo_vmware.api [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Task: {'id': task-3467226, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077743} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2242.037499] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2242.037686] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2242.037852] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2242.038034] env[69784]: INFO nova.compute.manager [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2242.039709] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4194c17f-d56d-4203-9de8-3e84b4a32241 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.041487] env[69784]: DEBUG nova.compute.claims [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2242.041656] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2242.041885] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2242.063200] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2242.143746] env[69784]: DEBUG oslo_vmware.rw_handles [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0f3a8314-0f85-4f68-9111-ed690d974935/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2242.205464] env[69784]: DEBUG oslo_vmware.rw_handles [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Completed reading data from the image iterator. {{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2242.205677] env[69784]: DEBUG oslo_vmware.rw_handles [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0f3a8314-0f85-4f68-9111-ed690d974935/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2242.256721] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af13663c-ab00-4647-a124-7facef5dbc9b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.263399] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2859098-71e4-4f84-b163-52096c5791ff {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.291959] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-072ef984-1029-4961-8323-612f1795b926 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.298258] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a19a53a6-6603-4305-bde1-6317267c9245 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.310361] env[69784]: DEBUG nova.compute.provider_tree [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2242.319280] env[69784]: DEBUG nova.scheduler.client.report [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2242.333256] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.291s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2242.333764] env[69784]: ERROR nova.compute.manager [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2242.333764] env[69784]: Faults: ['InvalidArgument'] [ 2242.333764] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Traceback (most recent call last): [ 2242.333764] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2242.333764] 
env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] self.driver.spawn(context, instance, image_meta, [ 2242.333764] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2242.333764] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2242.333764] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2242.333764] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] self._fetch_image_if_missing(context, vi) [ 2242.333764] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2242.333764] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] image_cache(vi, tmp_image_ds_loc) [ 2242.333764] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2242.334150] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] vm_util.copy_virtual_disk( [ 2242.334150] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2242.334150] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] session._wait_for_task(vmdk_copy_task) [ 2242.334150] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2242.334150] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] return self.wait_for_task(task_ref) [ 2242.334150] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2242.334150] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] return evt.wait() [ 2242.334150] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2242.334150] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] result = hub.switch() [ 2242.334150] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2242.334150] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] return self.greenlet.switch() [ 2242.334150] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2242.334150] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] self.f(*self.args, **self.kw) [ 2242.334491] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2242.334491] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] raise exceptions.translate_fault(task_info.error) [ 2242.334491] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2242.334491] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Faults: ['InvalidArgument'] [ 2242.334491] env[69784]: ERROR nova.compute.manager [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] [ 2242.334491] env[69784]: DEBUG nova.compute.utils [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2242.335764] env[69784]: DEBUG nova.compute.manager [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Build of instance 871e3b73-d4d8-4081-8b92-0dee212d8961 was re-scheduled: A specified parameter was not correct: fileType [ 2242.335764] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2242.336161] env[69784]: DEBUG nova.compute.manager [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2242.336336] env[69784]: DEBUG nova.compute.manager [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2242.336504] env[69784]: DEBUG nova.compute.manager [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2242.336662] env[69784]: DEBUG nova.network.neutron [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2242.644029] env[69784]: DEBUG nova.network.neutron [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2242.656265] env[69784]: INFO nova.compute.manager [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Took 0.32 seconds to deallocate network for instance. [ 2242.756855] env[69784]: INFO nova.scheduler.client.report [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Deleted allocations for instance 871e3b73-d4d8-4081-8b92-0dee212d8961 [ 2242.778115] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a2d39c20-6dea-4ddf-a3c1-ef4bd735480d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Lock "871e3b73-d4d8-4081-8b92-0dee212d8961" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 621.604s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2242.778379] env[69784]: DEBUG oslo_concurrency.lockutils [None req-3b5ee790-2cfa-444b-ae21-30161de2c96a tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Lock "871e3b73-d4d8-4081-8b92-0dee212d8961" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 426.050s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2242.778595] env[69784]: DEBUG oslo_concurrency.lockutils [None req-3b5ee790-2cfa-444b-ae21-30161de2c96a tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Acquiring lock "871e3b73-d4d8-4081-8b92-0dee212d8961-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2242.778804] env[69784]: DEBUG oslo_concurrency.lockutils [None req-3b5ee790-2cfa-444b-ae21-30161de2c96a tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Lock "871e3b73-d4d8-4081-8b92-0dee212d8961-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2242.778970] env[69784]: DEBUG oslo_concurrency.lockutils [None req-3b5ee790-2cfa-444b-ae21-30161de2c96a tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Lock "871e3b73-d4d8-4081-8b92-0dee212d8961-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2242.780984] env[69784]: INFO nova.compute.manager [None req-3b5ee790-2cfa-444b-ae21-30161de2c96a tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Terminating instance [ 2242.782843] env[69784]: DEBUG nova.compute.manager [None req-3b5ee790-2cfa-444b-ae21-30161de2c96a tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2242.783107] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-3b5ee790-2cfa-444b-ae21-30161de2c96a tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2242.783610] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2bb9cba3-063d-4717-b659-122020264332 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.794588] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc183b96-ab6a-43a7-b8ea-c6fe7fbf7ac2 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.822332] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-3b5ee790-2cfa-444b-ae21-30161de2c96a tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 871e3b73-d4d8-4081-8b92-0dee212d8961 could not be found. [ 2242.822536] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-3b5ee790-2cfa-444b-ae21-30161de2c96a tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2242.822748] env[69784]: INFO nova.compute.manager [None req-3b5ee790-2cfa-444b-ae21-30161de2c96a tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 2242.823055] env[69784]: DEBUG oslo.service.loopingcall [None req-3b5ee790-2cfa-444b-ae21-30161de2c96a tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2242.823309] env[69784]: DEBUG nova.compute.manager [-] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2242.823406] env[69784]: DEBUG nova.network.neutron [-] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2242.846648] env[69784]: DEBUG nova.network.neutron [-] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2242.855046] env[69784]: INFO nova.compute.manager [-] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] Took 0.03 seconds to deallocate network for instance. [ 2242.958976] env[69784]: DEBUG oslo_concurrency.lockutils [None req-3b5ee790-2cfa-444b-ae21-30161de2c96a tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Lock "871e3b73-d4d8-4081-8b92-0dee212d8961" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.180s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2242.959878] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "871e3b73-d4d8-4081-8b92-0dee212d8961" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 365.069s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2242.960040] env[69784]: INFO nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 871e3b73-d4d8-4081-8b92-0dee212d8961] During sync_power_state the instance has a pending task (deleting). Skip. [ 2242.960223] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "871e3b73-d4d8-4081-8b92-0dee212d8961" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2250.840108] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2250.840108] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69784) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2251.839955] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2252.839850] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2254.840582] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2257.839589] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2258.840370] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2258.856174] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2258.856400] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2258.856564] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2258.856719] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69784) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2258.858254] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9524f07-c333-4ea3-a95e-9dd8d406260b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.866758] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd2afe2c-b106-4a9b-ad6f-0525480d3992 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.880826] env[69784]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-251bd34d-dbee-4390-a1c6-9647826f88b1 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.887078] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9866821-9c05-4604-ad76-9dd4730b1719 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.916872] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180953MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=69784) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2258.917039] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2258.917231] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2258.986969] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ae4e1119-10e5-42fe-bb57-6bcb2c54d90b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2258.987163] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 4479a824-1f93-45d0-953f-57736580d86f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2258.987298] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ec925fe0-8f7b-46c0-8d61-6a9cf989e798 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2258.987420] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c78438d5-ddaa-4858-a161-af83e6c16e54 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2258.987538] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 0a177681-5f4e-4dc5-baee-1303be38444a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2258.987654] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance e35ffffb-c0fd-4236-b489-80eb0fdb4e37 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2258.987770] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 86a94763-92c8-4689-b37a-3dc6c1ec744c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2258.987887] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance e1969fb1-0f70-42c9-a362-f1efb6ee4619 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2258.988087] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2258.988232] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2259.081902] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b72dc346-972c-4bb4-b1bc-c6a016746adb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.089452] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-475ce077-3789-404d-a52b-cd92fc0c118a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.119054] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea685152-ea24-467a-8510-989a4012b349 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.125752] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27ef9e8b-f5bb-47f8-beb6-b8b032c1597b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.138061] env[69784]: DEBUG nova.compute.provider_tree [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2259.145912] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed for provider 
dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2259.159605] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69784) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2259.159787] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.243s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2260.154560] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2260.154910] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2260.154952] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Starting heal instance info cache {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2260.156117] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Rebuilding the list of instances to heal {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2260.176106] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2260.176106] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2260.176106] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2260.176106] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Skipping network cache update for instance because it is Building. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2260.176361] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2260.176361] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2260.176433] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 86a94763-92c8-4689-b37a-3dc6c1ec744c] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2260.176571] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: e1969fb1-0f70-42c9-a362-f1efb6ee4619] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2260.176643] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Didn't find any instances for network info cache update. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2260.177120] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2286.548631] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Acquiring lock "265a4f1d-8214-42ac-bbd0-4c3758ea6fed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2286.549017] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Lock "265a4f1d-8214-42ac-bbd0-4c3758ea6fed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2286.560535] env[69784]: DEBUG nova.compute.manager [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] [instance: 265a4f1d-8214-42ac-bbd0-4c3758ea6fed] Starting instance... 
{{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2286.610106] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2286.610372] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2286.612037] env[69784]: INFO nova.compute.claims [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] [instance: 265a4f1d-8214-42ac-bbd0-4c3758ea6fed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2286.806183] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39a8b477-1111-4146-955e-4b35cd99663d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.813064] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a4f2c52-8b2d-426d-9124-2bdc0c9736a9 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.844510] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1904a69-e92a-430e-b6ee-e2f1295a8041 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.852345] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf909be0-a923-4ae0-a1ae-efdb9a22702d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.867618] env[69784]: DEBUG nova.compute.provider_tree [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2286.876788] env[69784]: DEBUG nova.scheduler.client.report [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 
2286.892731] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.282s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2286.893230] env[69784]: DEBUG nova.compute.manager [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] [instance: 265a4f1d-8214-42ac-bbd0-4c3758ea6fed] Start building networks asynchronously for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2286.913783] env[69784]: DEBUG oslo_concurrency.lockutils [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Acquiring lock "74fe4356-3f52-4f4c-8bad-b065c2c3ac0f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2286.914014] env[69784]: DEBUG oslo_concurrency.lockutils [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Lock "74fe4356-3f52-4f4c-8bad-b065c2c3ac0f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2286.925392] env[69784]: DEBUG nova.compute.manager [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] [instance: 74fe4356-3f52-4f4c-8bad-b065c2c3ac0f] Starting instance... {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2286.930037] env[69784]: DEBUG nova.compute.utils [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2286.931571] env[69784]: DEBUG nova.compute.manager [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] [instance: 265a4f1d-8214-42ac-bbd0-4c3758ea6fed] Allocating IP information in the background. 
{{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 2286.931758] env[69784]: DEBUG nova.network.neutron [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] [instance: 265a4f1d-8214-42ac-bbd0-4c3758ea6fed] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2286.939033] env[69784]: DEBUG nova.compute.manager [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] [instance: 265a4f1d-8214-42ac-bbd0-4c3758ea6fed] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2286.991500] env[69784]: DEBUG nova.policy [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4f0a600fd9a24e4290074de1d4d9ac2d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '23adb692668941fdbc15f554063d3cd2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 2287.006261] env[69784]: DEBUG oslo_concurrency.lockutils [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2287.006516] env[69784]: DEBUG oslo_concurrency.lockutils [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2287.008008] env[69784]: INFO nova.compute.claims [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] [instance: 74fe4356-3f52-4f4c-8bad-b065c2c3ac0f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2287.024691] env[69784]: DEBUG nova.compute.manager [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] [instance: 265a4f1d-8214-42ac-bbd0-4c3758ea6fed] Start spawning the instance on the hypervisor. 
{{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2287.049875] env[69784]: DEBUG nova.virt.hardware [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2287.050124] env[69784]: DEBUG nova.virt.hardware [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2287.050484] env[69784]: DEBUG nova.virt.hardware [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2287.050484] env[69784]: DEBUG nova.virt.hardware [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2287.050600] env[69784]: DEBUG nova.virt.hardware [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2287.050751] env[69784]: DEBUG nova.virt.hardware [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2287.051345] env[69784]: DEBUG nova.virt.hardware [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2287.051345] env[69784]: DEBUG nova.virt.hardware [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2287.051345] env[69784]: DEBUG nova.virt.hardware [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2287.051567] env[69784]: DEBUG nova.virt.hardware [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2287.051606] env[69784]: DEBUG nova.virt.hardware [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2287.052573] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa16e0e9-32c1-4e0b-be21-e78f54f2120b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.064505] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e1bd855-4000-4ad1-9bd9-f1435d3599bb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.227283] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40010c6a-1483-4197-8748-4c1251cbffb5 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.234756] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ab961f8-f7c9-4dbf-8607-a818ee05b9ee {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.266786] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07503a03-fbf0-456f-a46f-cb135996019c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.274158] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00bf59c6-ba42-4769-9b3d-808f1b5ebdec {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.287345] env[69784]: DEBUG nova.compute.provider_tree [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2287.296327] env[69784]: DEBUG nova.scheduler.client.report [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 
16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2287.312688] env[69784]: DEBUG oslo_concurrency.lockutils [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.306s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2287.313245] env[69784]: DEBUG nova.compute.manager [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] [instance: 74fe4356-3f52-4f4c-8bad-b065c2c3ac0f] Start building networks asynchronously for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2287.354803] env[69784]: DEBUG nova.compute.utils [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2287.354803] env[69784]: DEBUG nova.compute.manager [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] [instance: 74fe4356-3f52-4f4c-8bad-b065c2c3ac0f] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 2287.354803] env[69784]: DEBUG nova.network.neutron [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] [instance: 74fe4356-3f52-4f4c-8bad-b065c2c3ac0f] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2287.366152] env[69784]: DEBUG nova.compute.manager [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] [instance: 74fe4356-3f52-4f4c-8bad-b065c2c3ac0f] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2287.419337] env[69784]: DEBUG nova.network.neutron [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] [instance: 265a4f1d-8214-42ac-bbd0-4c3758ea6fed] Successfully created port: c8f5e0cc-c4df-4a21-849c-1ba19b0d008c {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2287.435090] env[69784]: DEBUG nova.compute.manager [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] [instance: 74fe4356-3f52-4f4c-8bad-b065c2c3ac0f] Start spawning the instance on the hypervisor. 
{{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2287.444395] env[69784]: DEBUG nova.policy [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4f0a600fd9a24e4290074de1d4d9ac2d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '23adb692668941fdbc15f554063d3cd2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 2287.461308] env[69784]: DEBUG nova.virt.hardware [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2287.461555] env[69784]: DEBUG nova.virt.hardware [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2287.461713] env[69784]: DEBUG nova.virt.hardware [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2287.461896] env[69784]: DEBUG nova.virt.hardware [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2287.462054] env[69784]: DEBUG nova.virt.hardware [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2287.462203] env[69784]: DEBUG nova.virt.hardware [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 2287.462413] env[69784]: DEBUG nova.virt.hardware [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2287.462655] env[69784]: DEBUG nova.virt.hardware [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2287.462829] env[69784]: DEBUG nova.virt.hardware [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2287.462993] env[69784]: DEBUG nova.virt.hardware [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2287.463181] env[69784]: DEBUG nova.virt.hardware [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2287.464044] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ac562c-3d69-4a9c-ad1b-d31d3a6a85f0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.476019] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6c43885-b3fc-44ae-81a6-9b5c14f2239f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.925553] env[69784]: DEBUG nova.network.neutron [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] [instance: 74fe4356-3f52-4f4c-8bad-b065c2c3ac0f] Successfully created port: e0260580-8a07-4bd4-9bc3-f7841ba16e29 {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2288.283261] env[69784]: DEBUG nova.compute.manager [req-a57348c8-2f99-47a6-b562-d9665b01e275 req-5fdfea09-1ced-4cc2-b44d-e784dadf2ea6 service nova] [instance: 265a4f1d-8214-42ac-bbd0-4c3758ea6fed] Received event network-vif-plugged-c8f5e0cc-c4df-4a21-849c-1ba19b0d008c {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2288.283261] env[69784]: DEBUG oslo_concurrency.lockutils [req-a57348c8-2f99-47a6-b562-d9665b01e275 req-5fdfea09-1ced-4cc2-b44d-e784dadf2ea6 service nova] Acquiring lock "265a4f1d-8214-42ac-bbd0-4c3758ea6fed-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2288.283261] env[69784]: DEBUG oslo_concurrency.lockutils [req-a57348c8-2f99-47a6-b562-d9665b01e275 req-5fdfea09-1ced-4cc2-b44d-e784dadf2ea6 service nova] Lock "265a4f1d-8214-42ac-bbd0-4c3758ea6fed-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2288.283261] env[69784]: DEBUG oslo_concurrency.lockutils [req-a57348c8-2f99-47a6-b562-d9665b01e275 req-5fdfea09-1ced-4cc2-b44d-e784dadf2ea6 service nova] Lock "265a4f1d-8214-42ac-bbd0-4c3758ea6fed-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2288.283924] env[69784]: DEBUG nova.compute.manager [req-a57348c8-2f99-47a6-b562-d9665b01e275 req-5fdfea09-1ced-4cc2-b44d-e784dadf2ea6 service nova] [instance: 265a4f1d-8214-42ac-bbd0-4c3758ea6fed] No waiting events found dispatching network-vif-plugged-c8f5e0cc-c4df-4a21-849c-1ba19b0d008c {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2288.284256] env[69784]: WARNING nova.compute.manager [req-a57348c8-2f99-47a6-b562-d9665b01e275 req-5fdfea09-1ced-4cc2-b44d-e784dadf2ea6 service nova] [instance: 265a4f1d-8214-42ac-bbd0-4c3758ea6fed] Received unexpected event network-vif-plugged-c8f5e0cc-c4df-4a21-849c-1ba19b0d008c for instance with vm_state building and task_state spawning. [ 2288.650431] env[69784]: DEBUG nova.network.neutron [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] [instance: 265a4f1d-8214-42ac-bbd0-4c3758ea6fed] Successfully updated port: c8f5e0cc-c4df-4a21-849c-1ba19b0d008c {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2288.663203] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Acquiring lock "refresh_cache-265a4f1d-8214-42ac-bbd0-4c3758ea6fed" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2288.663376] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Acquired lock "refresh_cache-265a4f1d-8214-42ac-bbd0-4c3758ea6fed" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2288.663531] env[69784]: DEBUG nova.network.neutron [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] [instance: 265a4f1d-8214-42ac-bbd0-4c3758ea6fed] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2288.701794] env[69784]: DEBUG nova.network.neutron [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] [instance: 265a4f1d-8214-42ac-bbd0-4c3758ea6fed] Instance cache missing network info. 
{{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2288.960952] env[69784]: DEBUG nova.network.neutron [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] [instance: 265a4f1d-8214-42ac-bbd0-4c3758ea6fed] Updating instance_info_cache with network_info: [{"id": "c8f5e0cc-c4df-4a21-849c-1ba19b0d008c", "address": "fa:16:3e:74:21:2e", "network": {"id": "e672b001-fb32-4e4d-b85e-cdf243147769", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-352509944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23adb692668941fdbc15f554063d3cd2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8f5e0cc-c4", "ovs_interfaceid": "c8f5e0cc-c4df-4a21-849c-1ba19b0d008c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2288.971634] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Releasing lock "refresh_cache-265a4f1d-8214-42ac-bbd0-4c3758ea6fed" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2288.971923] env[69784]: DEBUG nova.compute.manager [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] [instance: 265a4f1d-8214-42ac-bbd0-4c3758ea6fed] Instance network_info: |[{"id": "c8f5e0cc-c4df-4a21-849c-1ba19b0d008c", "address": "fa:16:3e:74:21:2e", "network": {"id": "e672b001-fb32-4e4d-b85e-cdf243147769", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-352509944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23adb692668941fdbc15f554063d3cd2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8f5e0cc-c4", "ovs_interfaceid": "c8f5e0cc-c4df-4a21-849c-1ba19b0d008c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 2288.972386] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] [instance: 265a4f1d-8214-42ac-bbd0-4c3758ea6fed] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:21:2e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c8f5e0cc-c4df-4a21-849c-1ba19b0d008c', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2288.980014] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Creating folder: Project (23adb692668941fdbc15f554063d3cd2). Parent ref: group-v692547. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2288.980525] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7c41ed40-1102-4c5f-bea9-fbfeafc181cb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2288.992211] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Created folder: Project (23adb692668941fdbc15f554063d3cd2) in parent group-v692547. [ 2288.992387] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Creating folder: Instances. Parent ref: group-v692660. {{(pid=69784) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2288.992595] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ab2f4442-9869-4f5a-9cdd-7f4671bf3453 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2289.000978] env[69784]: INFO nova.virt.vmwareapi.vm_util [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Created folder: Instances in parent group-v692660. [ 2289.001204] env[69784]: DEBUG oslo.service.loopingcall [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2289.001375] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 265a4f1d-8214-42ac-bbd0-4c3758ea6fed] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2289.001558] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-df88801a-7000-474a-89d3-9a39851e7d6b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2289.020166] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2289.020166] env[69784]: value = "task-3467229" [ 2289.020166] env[69784]: _type = "Task" [ 2289.020166] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2289.027249] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467229, 'name': CreateVM_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2289.057945] env[69784]: DEBUG nova.network.neutron [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] [instance: 74fe4356-3f52-4f4c-8bad-b065c2c3ac0f] Successfully updated port: e0260580-8a07-4bd4-9bc3-f7841ba16e29 {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2289.065649] env[69784]: DEBUG oslo_concurrency.lockutils [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Acquiring lock "refresh_cache-74fe4356-3f52-4f4c-8bad-b065c2c3ac0f" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2289.065939] env[69784]: DEBUG oslo_concurrency.lockutils [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Acquired lock "refresh_cache-74fe4356-3f52-4f4c-8bad-b065c2c3ac0f" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2289.066218] env[69784]: DEBUG nova.network.neutron [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] [instance: 74fe4356-3f52-4f4c-8bad-b065c2c3ac0f] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2289.106989] env[69784]: DEBUG nova.network.neutron [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] [instance: 74fe4356-3f52-4f4c-8bad-b065c2c3ac0f] Instance cache missing network info. 
{{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2289.307135] env[69784]: DEBUG nova.network.neutron [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] [instance: 74fe4356-3f52-4f4c-8bad-b065c2c3ac0f] Updating instance_info_cache with network_info: [{"id": "e0260580-8a07-4bd4-9bc3-f7841ba16e29", "address": "fa:16:3e:41:ca:1c", "network": {"id": "e672b001-fb32-4e4d-b85e-cdf243147769", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-352509944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23adb692668941fdbc15f554063d3cd2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0260580-8a", "ovs_interfaceid": "e0260580-8a07-4bd4-9bc3-f7841ba16e29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2289.319640] env[69784]: DEBUG oslo_concurrency.lockutils [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Releasing lock "refresh_cache-74fe4356-3f52-4f4c-8bad-b065c2c3ac0f" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2289.319931] env[69784]: DEBUG nova.compute.manager [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] [instance: 74fe4356-3f52-4f4c-8bad-b065c2c3ac0f] Instance network_info: |[{"id": "e0260580-8a07-4bd4-9bc3-f7841ba16e29", "address": "fa:16:3e:41:ca:1c", "network": {"id": "e672b001-fb32-4e4d-b85e-cdf243147769", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-352509944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23adb692668941fdbc15f554063d3cd2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0260580-8a", "ovs_interfaceid": "e0260580-8a07-4bd4-9bc3-f7841ba16e29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 2289.320327] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] [instance: 74fe4356-3f52-4f4c-8bad-b065c2c3ac0f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:41:ca:1c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e0260580-8a07-4bd4-9bc3-f7841ba16e29', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2289.327922] env[69784]: DEBUG oslo.service.loopingcall [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2289.328380] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 74fe4356-3f52-4f4c-8bad-b065c2c3ac0f] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2289.328597] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b99f48d6-a979-455a-b7fe-30419461b851 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2289.348745] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2289.348745] env[69784]: value = "task-3467230" [ 2289.348745] env[69784]: _type = "Task" [ 2289.348745] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2289.356110] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467230, 'name': CreateVM_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2289.530853] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467229, 'name': CreateVM_Task, 'duration_secs': 0.294636} completed successfully. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2289.531070] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 265a4f1d-8214-42ac-bbd0-4c3758ea6fed] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2289.531703] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2289.531862] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2289.532209] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2289.532523] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b98ec88a-5790-4235-a513-f0a5cda6e1ec {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2289.537113] env[69784]: DEBUG oslo_vmware.api [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Waiting for the task: (returnval){ [ 2289.537113] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52731c55-bc03-d723-4ae1-225ac4000f9b" [ 2289.537113] env[69784]: _type = "Task" [ 2289.537113] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2289.544655] env[69784]: DEBUG oslo_vmware.api [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52731c55-bc03-d723-4ae1-225ac4000f9b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2289.859163] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467230, 'name': CreateVM_Task, 'duration_secs': 0.297553} completed successfully. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2289.859312] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 74fe4356-3f52-4f4c-8bad-b065c2c3ac0f] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2289.859930] env[69784]: DEBUG oslo_concurrency.lockutils [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2290.048069] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2290.048384] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] [instance: 265a4f1d-8214-42ac-bbd0-4c3758ea6fed] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2290.048517] env[69784]: DEBUG oslo_concurrency.lockutils [None req-4547aaf1-c869-4285-b20c-3fb34d4292c7 tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2290.048724] env[69784]: DEBUG oslo_concurrency.lockutils [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2290.049047] env[69784]: DEBUG oslo_concurrency.lockutils [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2290.049293] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a34d35d-f779-4fc7-b7bc-55a1223fb198 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.053554] env[69784]: DEBUG oslo_vmware.api [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Waiting for the task: (returnval){ [ 2290.053554] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52505d6c-9b9a-7da8-359f-70a96363b11c" [ 2290.053554] env[69784]: _type = "Task" [ 
2290.053554] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2290.062322] env[69784]: DEBUG oslo_vmware.api [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52505d6c-9b9a-7da8-359f-70a96363b11c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2290.314954] env[69784]: DEBUG nova.compute.manager [req-3f4f4fb5-50d5-4b9f-96ba-d94e3ffc756a req-f21ddf8f-7dec-4dbb-b507-60cbf817a776 service nova] [instance: 265a4f1d-8214-42ac-bbd0-4c3758ea6fed] Received event network-changed-c8f5e0cc-c4df-4a21-849c-1ba19b0d008c {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2290.315189] env[69784]: DEBUG nova.compute.manager [req-3f4f4fb5-50d5-4b9f-96ba-d94e3ffc756a req-f21ddf8f-7dec-4dbb-b507-60cbf817a776 service nova] [instance: 265a4f1d-8214-42ac-bbd0-4c3758ea6fed] Refreshing instance network info cache due to event network-changed-c8f5e0cc-c4df-4a21-849c-1ba19b0d008c. {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2290.315413] env[69784]: DEBUG oslo_concurrency.lockutils [req-3f4f4fb5-50d5-4b9f-96ba-d94e3ffc756a req-f21ddf8f-7dec-4dbb-b507-60cbf817a776 service nova] Acquiring lock "refresh_cache-265a4f1d-8214-42ac-bbd0-4c3758ea6fed" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2290.315576] env[69784]: DEBUG oslo_concurrency.lockutils [req-3f4f4fb5-50d5-4b9f-96ba-d94e3ffc756a req-f21ddf8f-7dec-4dbb-b507-60cbf817a776 service nova] Acquired lock "refresh_cache-265a4f1d-8214-42ac-bbd0-4c3758ea6fed" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2290.315748] env[69784]: DEBUG nova.network.neutron [req-3f4f4fb5-50d5-4b9f-96ba-d94e3ffc756a req-f21ddf8f-7dec-4dbb-b507-60cbf817a776 service nova] [instance: 265a4f1d-8214-42ac-bbd0-4c3758ea6fed] Refreshing network info cache for port c8f5e0cc-c4df-4a21-849c-1ba19b0d008c {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2290.563342] env[69784]: DEBUG oslo_concurrency.lockutils [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2290.563587] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] [instance: 74fe4356-3f52-4f4c-8bad-b065c2c3ac0f] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2290.563852] env[69784]: DEBUG oslo_concurrency.lockutils [None req-22542529-1782-4837-888d-6e290390b36e tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) 
lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2290.576943] env[69784]: DEBUG nova.network.neutron [req-3f4f4fb5-50d5-4b9f-96ba-d94e3ffc756a req-f21ddf8f-7dec-4dbb-b507-60cbf817a776 service nova] [instance: 265a4f1d-8214-42ac-bbd0-4c3758ea6fed] Updated VIF entry in instance network info cache for port c8f5e0cc-c4df-4a21-849c-1ba19b0d008c. {{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2290.577292] env[69784]: DEBUG nova.network.neutron [req-3f4f4fb5-50d5-4b9f-96ba-d94e3ffc756a req-f21ddf8f-7dec-4dbb-b507-60cbf817a776 service nova] [instance: 265a4f1d-8214-42ac-bbd0-4c3758ea6fed] Updating instance_info_cache with network_info: [{"id": "c8f5e0cc-c4df-4a21-849c-1ba19b0d008c", "address": "fa:16:3e:74:21:2e", "network": {"id": "e672b001-fb32-4e4d-b85e-cdf243147769", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-352509944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23adb692668941fdbc15f554063d3cd2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8f5e0cc-c4", "ovs_interfaceid": "c8f5e0cc-c4df-4a21-849c-1ba19b0d008c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2290.586307] env[69784]: DEBUG oslo_concurrency.lockutils [req-3f4f4fb5-50d5-4b9f-96ba-d94e3ffc756a req-f21ddf8f-7dec-4dbb-b507-60cbf817a776 service nova] Releasing lock "refresh_cache-265a4f1d-8214-42ac-bbd0-4c3758ea6fed" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2290.586523] env[69784]: DEBUG nova.compute.manager [req-3f4f4fb5-50d5-4b9f-96ba-d94e3ffc756a req-f21ddf8f-7dec-4dbb-b507-60cbf817a776 service nova] [instance: 74fe4356-3f52-4f4c-8bad-b065c2c3ac0f] Received event network-vif-plugged-e0260580-8a07-4bd4-9bc3-f7841ba16e29 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2290.586742] env[69784]: DEBUG oslo_concurrency.lockutils [req-3f4f4fb5-50d5-4b9f-96ba-d94e3ffc756a req-f21ddf8f-7dec-4dbb-b507-60cbf817a776 service nova] Acquiring lock "74fe4356-3f52-4f4c-8bad-b065c2c3ac0f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2290.586964] env[69784]: DEBUG oslo_concurrency.lockutils [req-3f4f4fb5-50d5-4b9f-96ba-d94e3ffc756a req-f21ddf8f-7dec-4dbb-b507-60cbf817a776 service nova] Lock "74fe4356-3f52-4f4c-8bad-b065c2c3ac0f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2290.587146] env[69784]: DEBUG 
oslo_concurrency.lockutils [req-3f4f4fb5-50d5-4b9f-96ba-d94e3ffc756a req-f21ddf8f-7dec-4dbb-b507-60cbf817a776 service nova] Lock "74fe4356-3f52-4f4c-8bad-b065c2c3ac0f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2290.587314] env[69784]: DEBUG nova.compute.manager [req-3f4f4fb5-50d5-4b9f-96ba-d94e3ffc756a req-f21ddf8f-7dec-4dbb-b507-60cbf817a776 service nova] [instance: 74fe4356-3f52-4f4c-8bad-b065c2c3ac0f] No waiting events found dispatching network-vif-plugged-e0260580-8a07-4bd4-9bc3-f7841ba16e29 {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2290.587478] env[69784]: WARNING nova.compute.manager [req-3f4f4fb5-50d5-4b9f-96ba-d94e3ffc756a req-f21ddf8f-7dec-4dbb-b507-60cbf817a776 service nova] [instance: 74fe4356-3f52-4f4c-8bad-b065c2c3ac0f] Received unexpected event network-vif-plugged-e0260580-8a07-4bd4-9bc3-f7841ba16e29 for instance with vm_state building and task_state spawning. [ 2290.587639] env[69784]: DEBUG nova.compute.manager [req-3f4f4fb5-50d5-4b9f-96ba-d94e3ffc756a req-f21ddf8f-7dec-4dbb-b507-60cbf817a776 service nova] [instance: 74fe4356-3f52-4f4c-8bad-b065c2c3ac0f] Received event network-changed-e0260580-8a07-4bd4-9bc3-f7841ba16e29 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2290.587788] env[69784]: DEBUG nova.compute.manager [req-3f4f4fb5-50d5-4b9f-96ba-d94e3ffc756a req-f21ddf8f-7dec-4dbb-b507-60cbf817a776 service nova] [instance: 74fe4356-3f52-4f4c-8bad-b065c2c3ac0f] Refreshing instance network info cache due to event network-changed-e0260580-8a07-4bd4-9bc3-f7841ba16e29. {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2290.587963] env[69784]: DEBUG oslo_concurrency.lockutils [req-3f4f4fb5-50d5-4b9f-96ba-d94e3ffc756a req-f21ddf8f-7dec-4dbb-b507-60cbf817a776 service nova] Acquiring lock "refresh_cache-74fe4356-3f52-4f4c-8bad-b065c2c3ac0f" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2290.588109] env[69784]: DEBUG oslo_concurrency.lockutils [req-3f4f4fb5-50d5-4b9f-96ba-d94e3ffc756a req-f21ddf8f-7dec-4dbb-b507-60cbf817a776 service nova] Acquired lock "refresh_cache-74fe4356-3f52-4f4c-8bad-b065c2c3ac0f" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2290.588262] env[69784]: DEBUG nova.network.neutron [req-3f4f4fb5-50d5-4b9f-96ba-d94e3ffc756a req-f21ddf8f-7dec-4dbb-b507-60cbf817a776 service nova] [instance: 74fe4356-3f52-4f4c-8bad-b065c2c3ac0f] Refreshing network info cache for port e0260580-8a07-4bd4-9bc3-f7841ba16e29 {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2290.869381] env[69784]: DEBUG nova.network.neutron [req-3f4f4fb5-50d5-4b9f-96ba-d94e3ffc756a req-f21ddf8f-7dec-4dbb-b507-60cbf817a776 service nova] [instance: 74fe4356-3f52-4f4c-8bad-b065c2c3ac0f] Updated VIF entry in instance network info cache for port e0260580-8a07-4bd4-9bc3-f7841ba16e29. 
{{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2290.869744] env[69784]: DEBUG nova.network.neutron [req-3f4f4fb5-50d5-4b9f-96ba-d94e3ffc756a req-f21ddf8f-7dec-4dbb-b507-60cbf817a776 service nova] [instance: 74fe4356-3f52-4f4c-8bad-b065c2c3ac0f] Updating instance_info_cache with network_info: [{"id": "e0260580-8a07-4bd4-9bc3-f7841ba16e29", "address": "fa:16:3e:41:ca:1c", "network": {"id": "e672b001-fb32-4e4d-b85e-cdf243147769", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-352509944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23adb692668941fdbc15f554063d3cd2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0260580-8a", "ovs_interfaceid": "e0260580-8a07-4bd4-9bc3-f7841ba16e29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2290.879830] env[69784]: DEBUG oslo_concurrency.lockutils [req-3f4f4fb5-50d5-4b9f-96ba-d94e3ffc756a req-f21ddf8f-7dec-4dbb-b507-60cbf817a776 service nova] Releasing lock "refresh_cache-74fe4356-3f52-4f4c-8bad-b065c2c3ac0f" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2291.723923] env[69784]: WARNING oslo_vmware.rw_handles [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2291.723923] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2291.723923] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2291.723923] env[69784]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2291.723923] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2291.723923] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 2291.723923] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2291.723923] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2291.723923] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2291.723923] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2291.723923] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2291.723923] env[69784]: ERROR oslo_vmware.rw_handles [ 2291.724934] env[69784]: DEBUG nova.virt.vmwareapi.images [None 
req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/0f3a8314-0f85-4f68-9111-ed690d974935/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2291.726730] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2291.726974] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Copying Virtual Disk [datastore1] vmware_temp/0f3a8314-0f85-4f68-9111-ed690d974935/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] vmware_temp/0f3a8314-0f85-4f68-9111-ed690d974935/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2291.727285] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6c2e1220-257c-4c55-b3be-f531037086fa {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2291.735155] env[69784]: DEBUG oslo_vmware.api [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Waiting for the task: (returnval){ [ 2291.735155] env[69784]: value = "task-3467231" [ 2291.735155] env[69784]: _type = "Task" [ 2291.735155] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2291.743016] env[69784]: DEBUG oslo_vmware.api [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Task: {'id': task-3467231, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2292.245950] env[69784]: DEBUG oslo_vmware.exceptions [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Fault InvalidArgument not matched. 
{{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2292.246247] env[69784]: DEBUG oslo_concurrency.lockutils [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2292.246795] env[69784]: ERROR nova.compute.manager [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2292.246795] env[69784]: Faults: ['InvalidArgument'] [ 2292.246795] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Traceback (most recent call last): [ 2292.246795] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2292.246795] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] yield resources [ 2292.246795] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2292.246795] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] self.driver.spawn(context, instance, image_meta, [ 2292.246795] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2292.246795] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2292.246795] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2292.246795] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] self._fetch_image_if_missing(context, vi) [ 2292.246795] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2292.247223] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] image_cache(vi, tmp_image_ds_loc) [ 2292.247223] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2292.247223] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] vm_util.copy_virtual_disk( [ 2292.247223] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2292.247223] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] session._wait_for_task(vmdk_copy_task) [ 2292.247223] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2292.247223] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] return self.wait_for_task(task_ref) [ 2292.247223] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2292.247223] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] return evt.wait() [ 2292.247223] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2292.247223] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] result = hub.switch() [ 2292.247223] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2292.247223] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] return self.greenlet.switch() [ 2292.247517] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2292.247517] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] self.f(*self.args, **self.kw) [ 2292.247517] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2292.247517] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] raise exceptions.translate_fault(task_info.error) [ 2292.247517] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2292.247517] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Faults: ['InvalidArgument'] [ 2292.247517] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] [ 2292.247517] env[69784]: INFO nova.compute.manager [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Terminating instance [ 2292.248651] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2292.248857] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2292.249097] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-75ff8ef6-d8ec-4ac9-823a-095597759e25 {{(pid=69784) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2292.251193] env[69784]: DEBUG nova.compute.manager [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2292.251384] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2292.252080] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d25eb97c-e546-4501-9d6e-a1da1634c204 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2292.258834] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2292.259049] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0310f6cb-ff98-41e1-a041-213bbbfe2c3b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2292.261166] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2292.261336] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2292.262294] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-faf7eb9e-7dc0-450b-af2c-e58517f21bb7 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2292.266726] env[69784]: DEBUG oslo_vmware.api [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Waiting for the task: (returnval){ [ 2292.266726] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]521bf0cb-a90f-e5ce-59a2-c3d50c3c0e77" [ 2292.266726] env[69784]: _type = "Task" [ 2292.266726] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2292.277163] env[69784]: DEBUG oslo_vmware.api [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]521bf0cb-a90f-e5ce-59a2-c3d50c3c0e77, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2292.339213] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2292.339528] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2292.339620] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Deleting the datastore file [datastore1] ae4e1119-10e5-42fe-bb57-6bcb2c54d90b {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2292.339869] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7573c2fd-b82e-492e-85b8-0153acacb233 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2292.346226] env[69784]: DEBUG oslo_vmware.api [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Waiting for the task: (returnval){ [ 2292.346226] env[69784]: value = "task-3467233" [ 2292.346226] env[69784]: _type = "Task" [ 2292.346226] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2292.353580] env[69784]: DEBUG oslo_vmware.api [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Task: {'id': task-3467233, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2292.777451] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2292.777788] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Creating directory with path [datastore1] vmware_temp/204e9700-23db-414a-95f5-4d3a1942422f/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2292.777981] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-217addd2-239d-4ca6-886c-416c1991ed61 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2292.789315] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Created directory with path [datastore1] vmware_temp/204e9700-23db-414a-95f5-4d3a1942422f/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2292.789476] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Fetch image to [datastore1] vmware_temp/204e9700-23db-414a-95f5-4d3a1942422f/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2292.789619] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/204e9700-23db-414a-95f5-4d3a1942422f/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2292.790334] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31dcd33c-5c1f-457d-b154-2cac2a1923cb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2292.797915] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a49e8412-d5e6-45c0-8602-67598c7382fc {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2292.806764] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02975d90-7fc4-4d32-983f-cb3aadea2e8a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2292.836325] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eff8fb3e-da17-4ef5-969a-e8ca2576126e {{(pid=69784) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2292.841753] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d609c366-e7a0-4488-9c17-9be8199f4c2e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2292.853875] env[69784]: DEBUG oslo_vmware.api [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Task: {'id': task-3467233, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080106} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2292.854117] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2292.854297] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2292.854463] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2292.854699] env[69784]: INFO nova.compute.manager [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Took 0.60 seconds to destroy the instance on the hypervisor. 
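Note: the CopyVirtualDisk_Task and DeleteDatastoreFile_Task entries above follow the same pattern — oslo.vmware submits the vCenter task, then repeatedly polls it ("Waiting for the task ... to complete", "progress is 0%") until it either completes successfully or reports a fault, at which point the fault is translated into the VimFaultException shown in the traceback. Below is a minimal, hypothetical sketch of that poll-and-raise loop; it is not the oslo.vmware implementation, and get_task_info() and TaskFailed are illustrative stand-ins for the vSphere task query and the fault translation step.

import time

POLL_INTERVAL = 0.5  # seconds between polls; assumed value for illustration


class TaskFailed(Exception):
    """Stand-in for the translated vCenter fault (e.g. InvalidArgument)."""


def wait_for_task(task_ref, get_task_info):
    # get_task_info(task_ref) is assumed to return an object with .state
    # ('running', 'success' or 'error'), .progress and .error attributes,
    # mirroring the task info being polled in the log above.
    while True:
        info = get_task_info(task_ref)
        if info.state == 'success':
            return info
        if info.state == 'error':
            # Corresponds to the "Fault InvalidArgument not matched" line:
            # the raw task fault is turned into a Python exception and raised.
            raise TaskFailed(info.error)
        time.sleep(POLL_INTERVAL)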
[ 2292.856763] env[69784]: DEBUG nova.compute.claims [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2292.856927] env[69784]: DEBUG oslo_concurrency.lockutils [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2292.857156] env[69784]: DEBUG oslo_concurrency.lockutils [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2292.862499] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2292.912577] env[69784]: DEBUG oslo_vmware.rw_handles [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/204e9700-23db-414a-95f5-4d3a1942422f/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2292.973040] env[69784]: DEBUG oslo_vmware.rw_handles [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Completed reading data from the image iterator. {{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2292.973234] env[69784]: DEBUG oslo_vmware.rw_handles [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/204e9700-23db-414a-95f5-4d3a1942422f/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2293.075791] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf06ffbd-6de2-4320-8b6a-1031b7e04635 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.083304] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f507ae0c-37da-4e3c-9a7c-9ee8855297a9 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.113251] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f87b7c2-b64e-44c8-b509-f49c079b61bd {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.120308] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9fa7bfb-2f23-4694-8683-2e7c45a9b7d1 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.133007] env[69784]: DEBUG nova.compute.provider_tree [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2293.140797] env[69784]: DEBUG nova.scheduler.client.report [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2293.155715] env[69784]: DEBUG oslo_concurrency.lockutils [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.298s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2293.156237] env[69784]: ERROR nova.compute.manager [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2293.156237] env[69784]: Faults: ['InvalidArgument'] [ 2293.156237] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Traceback (most recent call last): [ 2293.156237] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] File "/opt/stack/nova/nova/compute/manager.py", line 
2615, in _build_and_run_instance [ 2293.156237] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] self.driver.spawn(context, instance, image_meta, [ 2293.156237] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2293.156237] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2293.156237] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2293.156237] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] self._fetch_image_if_missing(context, vi) [ 2293.156237] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2293.156237] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] image_cache(vi, tmp_image_ds_loc) [ 2293.156237] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2293.156586] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] vm_util.copy_virtual_disk( [ 2293.156586] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2293.156586] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] session._wait_for_task(vmdk_copy_task) [ 2293.156586] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2293.156586] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] return self.wait_for_task(task_ref) [ 2293.156586] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2293.156586] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] return evt.wait() [ 2293.156586] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2293.156586] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] result = hub.switch() [ 2293.156586] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2293.156586] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] return self.greenlet.switch() [ 2293.156586] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2293.156586] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] self.f(*self.args, **self.kw) [ 2293.156946] env[69784]: ERROR nova.compute.manager [instance: 
ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2293.156946] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] raise exceptions.translate_fault(task_info.error) [ 2293.156946] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2293.156946] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Faults: ['InvalidArgument'] [ 2293.156946] env[69784]: ERROR nova.compute.manager [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] [ 2293.156946] env[69784]: DEBUG nova.compute.utils [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2293.158250] env[69784]: DEBUG nova.compute.manager [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Build of instance ae4e1119-10e5-42fe-bb57-6bcb2c54d90b was re-scheduled: A specified parameter was not correct: fileType [ 2293.158250] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2293.158605] env[69784]: DEBUG nova.compute.manager [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2293.158775] env[69784]: DEBUG nova.compute.manager [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2293.158939] env[69784]: DEBUG nova.compute.manager [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2293.159123] env[69784]: DEBUG nova.network.neutron [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2293.540785] env[69784]: DEBUG nova.network.neutron [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2293.557063] env[69784]: INFO nova.compute.manager [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Took 0.40 seconds to deallocate network for instance. [ 2293.652690] env[69784]: INFO nova.scheduler.client.report [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Deleted allocations for instance ae4e1119-10e5-42fe-bb57-6bcb2c54d90b [ 2293.678572] env[69784]: DEBUG oslo_concurrency.lockutils [None req-503353a0-93d5-4653-b1d1-e839eb533594 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Lock "ae4e1119-10e5-42fe-bb57-6bcb2c54d90b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 629.815s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2293.678846] env[69784]: DEBUG oslo_concurrency.lockutils [None req-3734dffc-08fc-48d7-92ee-316e693d4475 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Lock "ae4e1119-10e5-42fe-bb57-6bcb2c54d90b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 433.524s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2293.679084] env[69784]: DEBUG oslo_concurrency.lockutils [None req-3734dffc-08fc-48d7-92ee-316e693d4475 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Acquiring lock "ae4e1119-10e5-42fe-bb57-6bcb2c54d90b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2293.679304] env[69784]: DEBUG oslo_concurrency.lockutils [None req-3734dffc-08fc-48d7-92ee-316e693d4475 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] 
Lock "ae4e1119-10e5-42fe-bb57-6bcb2c54d90b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2293.679479] env[69784]: DEBUG oslo_concurrency.lockutils [None req-3734dffc-08fc-48d7-92ee-316e693d4475 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Lock "ae4e1119-10e5-42fe-bb57-6bcb2c54d90b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2293.681733] env[69784]: INFO nova.compute.manager [None req-3734dffc-08fc-48d7-92ee-316e693d4475 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Terminating instance [ 2293.683867] env[69784]: DEBUG nova.compute.manager [None req-3734dffc-08fc-48d7-92ee-316e693d4475 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2293.684092] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-3734dffc-08fc-48d7-92ee-316e693d4475 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2293.684857] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e08abb4e-adc3-48de-9cb7-0f58b1d5e914 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.696126] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62af920c-e728-4820-8ad8-dc9d31fe1270 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.726347] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-3734dffc-08fc-48d7-92ee-316e693d4475 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ae4e1119-10e5-42fe-bb57-6bcb2c54d90b could not be found. [ 2293.726515] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-3734dffc-08fc-48d7-92ee-316e693d4475 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2293.726881] env[69784]: INFO nova.compute.manager [None req-3734dffc-08fc-48d7-92ee-316e693d4475 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 2293.727035] env[69784]: DEBUG oslo.service.loopingcall [None req-3734dffc-08fc-48d7-92ee-316e693d4475 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2293.727636] env[69784]: DEBUG nova.compute.manager [-] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2293.727636] env[69784]: DEBUG nova.network.neutron [-] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2293.754485] env[69784]: DEBUG nova.network.neutron [-] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2293.764618] env[69784]: INFO nova.compute.manager [-] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] Took 0.04 seconds to deallocate network for instance. [ 2293.876210] env[69784]: DEBUG oslo_concurrency.lockutils [None req-3734dffc-08fc-48d7-92ee-316e693d4475 tempest-InstanceActionsNegativeTestJSON-317807868 tempest-InstanceActionsNegativeTestJSON-317807868-project-member] Lock "ae4e1119-10e5-42fe-bb57-6bcb2c54d90b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.197s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2293.877202] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "ae4e1119-10e5-42fe-bb57-6bcb2c54d90b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 415.986s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2293.877417] env[69784]: INFO nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: ae4e1119-10e5-42fe-bb57-6bcb2c54d90b] During sync_power_state the instance has a pending task (deleting). Skip. [ 2293.877980] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "ae4e1119-10e5-42fe-bb57-6bcb2c54d90b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2310.839380] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2310.839741] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69784) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2313.840448] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2313.840752] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2315.839597] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2317.841665] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2319.835585] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2320.840484] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2320.840796] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Starting heal instance info cache {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2320.840796] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Rebuilding the list of instances to heal {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2320.862372] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2320.862511] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2320.862646] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2320.862773] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Skipping network cache update for instance because it is Building. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2320.862927] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2320.863077] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 86a94763-92c8-4689-b37a-3dc6c1ec744c] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2320.863203] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: e1969fb1-0f70-42c9-a362-f1efb6ee4619] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2320.863321] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 265a4f1d-8214-42ac-bbd0-4c3758ea6fed] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2320.863437] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 74fe4356-3f52-4f4c-8bad-b065c2c3ac0f] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2320.863556] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Didn't find any instances for network info cache update. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2320.864045] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2320.864228] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2320.874265] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2320.874427] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2320.874586] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2320.874734] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69784) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2320.875805] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-020c2ec9-f220-4992-b0ce-9271dec8ae1e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2320.884453] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16638745-a4f1-45a6-bb9b-6cd0d42ad451 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2320.897997] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dc8ab43-7cc1-457a-9ce1-1736281abd9b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2320.903752] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23590459-8af3-40bf-82d7-6ba4a0b86efc {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2320.931240] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180952MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=69784) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2320.931378] env[69784]: DEBUG 
oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2320.931556] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2320.998845] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 4479a824-1f93-45d0-953f-57736580d86f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2320.998999] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ec925fe0-8f7b-46c0-8d61-6a9cf989e798 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2320.999142] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c78438d5-ddaa-4858-a161-af83e6c16e54 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2320.999263] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 0a177681-5f4e-4dc5-baee-1303be38444a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2320.999383] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance e35ffffb-c0fd-4236-b489-80eb0fdb4e37 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2320.999497] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 86a94763-92c8-4689-b37a-3dc6c1ec744c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2320.999608] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance e1969fb1-0f70-42c9-a362-f1efb6ee4619 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2320.999721] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 265a4f1d-8214-42ac-bbd0-4c3758ea6fed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2320.999830] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 74fe4356-3f52-4f4c-8bad-b065c2c3ac0f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2321.000009] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2321.000152] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2321.102997] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d763e04d-5e57-4b28-b1d2-8dfea0c3c0b0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.109986] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cefb97c-24c4-4840-a6a3-bc8d5d8d512b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.141261] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-480dff95-7940-444b-950a-833b5cac57dc {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.148299] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27b2a565-bce5-4547-b439-9bfdb864e6de {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.160806] env[69784]: DEBUG nova.compute.provider_tree [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2321.168786] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2321.180930] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69784) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2321.181122] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.250s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2325.229624] env[69784]: DEBUG oslo_concurrency.lockutils [None req-d04f6b43-7a7c-46e6-b070-83a59eedeecd tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Acquiring lock "86a94763-92c8-4689-b37a-3dc6c1ec744c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2326.175623] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2340.303571] env[69784]: DEBUG oslo_concurrency.lockutils [None req-a0ea12a7-a0c7-4123-b749-1e6ef72f89e7 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquiring lock "e1969fb1-0f70-42c9-a362-f1efb6ee4619" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2342.895272] env[69784]: WARNING oslo_vmware.rw_handles [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2342.895272] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2342.895272] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2342.895272] env[69784]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2342.895272] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2342.895272] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 2342.895272] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2342.895272] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2342.895272] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2342.895272] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2342.895272] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2342.895272] env[69784]: ERROR oslo_vmware.rw_handles [ 2342.895902] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae 
tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/204e9700-23db-414a-95f5-4d3a1942422f/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2342.897774] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2342.898022] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Copying Virtual Disk [datastore1] vmware_temp/204e9700-23db-414a-95f5-4d3a1942422f/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] vmware_temp/204e9700-23db-414a-95f5-4d3a1942422f/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2342.898302] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ee86258c-36a6-4b18-a55b-d52f9f3452d6 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2342.907187] env[69784]: DEBUG oslo_vmware.api [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Waiting for the task: (returnval){ [ 2342.907187] env[69784]: value = "task-3467234" [ 2342.907187] env[69784]: _type = "Task" [ 2342.907187] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2342.915042] env[69784]: DEBUG oslo_vmware.api [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Task: {'id': task-3467234, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2343.417756] env[69784]: DEBUG oslo_vmware.exceptions [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Fault InvalidArgument not matched. 
{{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2343.418049] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2343.418592] env[69784]: ERROR nova.compute.manager [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2343.418592] env[69784]: Faults: ['InvalidArgument'] [ 2343.418592] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] Traceback (most recent call last): [ 2343.418592] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2343.418592] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] yield resources [ 2343.418592] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2343.418592] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] self.driver.spawn(context, instance, image_meta, [ 2343.418592] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2343.418592] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2343.418592] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2343.418592] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] self._fetch_image_if_missing(context, vi) [ 2343.418592] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2343.418986] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] image_cache(vi, tmp_image_ds_loc) [ 2343.418986] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2343.418986] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] vm_util.copy_virtual_disk( [ 2343.418986] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2343.418986] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] session._wait_for_task(vmdk_copy_task) [ 2343.418986] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 2343.418986] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] return self.wait_for_task(task_ref) [ 2343.418986] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2343.418986] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] return evt.wait() [ 2343.418986] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2343.418986] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] result = hub.switch() [ 2343.418986] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2343.418986] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] return self.greenlet.switch() [ 2343.419378] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2343.419378] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] self.f(*self.args, **self.kw) [ 2343.419378] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2343.419378] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] raise exceptions.translate_fault(task_info.error) [ 2343.419378] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2343.419378] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] Faults: ['InvalidArgument'] [ 2343.419378] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] [ 2343.419378] env[69784]: INFO nova.compute.manager [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Terminating instance [ 2343.420401] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2343.420612] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2343.420844] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4c6dc389-219e-49ea-be83-c133de203e99 {{(pid=69784) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2343.423237] env[69784]: DEBUG nova.compute.manager [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2343.423458] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2343.424174] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b81e7f86-973e-4d5b-af73-5a7ae2488430 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2343.430638] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2343.430858] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4d876344-a4b4-4014-9b08-0bad51bbd713 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2343.432980] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2343.433183] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2343.434124] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ff256bd-d800-4d54-b981-7427b15638fb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2343.438766] env[69784]: DEBUG oslo_vmware.api [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Waiting for the task: (returnval){ [ 2343.438766] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52fdde7e-3b1e-ee7e-dcd6-507d0ed0eca2" [ 2343.438766] env[69784]: _type = "Task" [ 2343.438766] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2343.445754] env[69784]: DEBUG oslo_vmware.api [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52fdde7e-3b1e-ee7e-dcd6-507d0ed0eca2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2343.497845] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2343.498167] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2343.498365] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Deleting the datastore file [datastore1] 4479a824-1f93-45d0-953f-57736580d86f {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2343.498674] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8ddab74d-51dc-4bc1-ab0b-0b8f674f39e8 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2343.505149] env[69784]: DEBUG oslo_vmware.api [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Waiting for the task: (returnval){ [ 2343.505149] env[69784]: value = "task-3467236" [ 2343.505149] env[69784]: _type = "Task" [ 2343.505149] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2343.512857] env[69784]: DEBUG oslo_vmware.api [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Task: {'id': task-3467236, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2343.949335] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2343.949697] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Creating directory with path [datastore1] vmware_temp/7e0013a5-a969-4eb1-93af-84187626c760/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2343.949753] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-de4909dd-c6b0-4896-a311-39f5cf85eedc {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2343.961553] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Created directory with path [datastore1] vmware_temp/7e0013a5-a969-4eb1-93af-84187626c760/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2343.961740] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Fetch image to [datastore1] vmware_temp/7e0013a5-a969-4eb1-93af-84187626c760/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2343.961911] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/7e0013a5-a969-4eb1-93af-84187626c760/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2343.962632] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-308ca7aa-d176-457c-a95b-775a55b2afea {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2343.969057] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61be2c13-55aa-4a14-be5e-d48fe5e2be65 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2343.977650] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a987fa6a-208b-41fb-a643-6d9f64f926d6 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.010389] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9650c6d-a684-42e2-aa3b-93581087bc1d {{(pid=69784) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.016791] env[69784]: DEBUG oslo_vmware.api [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Task: {'id': task-3467236, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.085002} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2344.018161] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2344.018349] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2344.018516] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2344.018684] env[69784]: INFO nova.compute.manager [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2344.020363] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7ef31729-3e0f-4062-9ef5-2d7ffb8f0e68 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.022132] env[69784]: DEBUG nova.compute.claims [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2344.022307] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2344.022515] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2344.042539] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2344.094913] env[69784]: DEBUG oslo_vmware.rw_handles [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7e0013a5-a969-4eb1-93af-84187626c760/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2344.154966] env[69784]: DEBUG oslo_vmware.rw_handles [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Completed reading data from the image iterator. {{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2344.155213] env[69784]: DEBUG oslo_vmware.rw_handles [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7e0013a5-a969-4eb1-93af-84187626c760/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2344.230156] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2c2b639-3a9f-4a30-98df-ddac89842a70 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.237578] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2b3263d-90f4-4d55-b32d-186ca4166fd7 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.267695] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ab1ec64-0a0b-4d76-b8a6-8f78737926f9 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.274760] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d6cbc83-8f52-4b0a-95f2-c2461b8f88e1 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.287474] env[69784]: DEBUG nova.compute.provider_tree [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2344.295855] env[69784]: DEBUG nova.scheduler.client.report [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2344.311596] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.289s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2344.312130] env[69784]: ERROR nova.compute.manager [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2344.312130] env[69784]: Faults: ['InvalidArgument'] [ 2344.312130] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] Traceback (most recent call last): [ 2344.312130] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2344.312130] env[69784]: ERROR nova.compute.manager [instance: 
4479a824-1f93-45d0-953f-57736580d86f] self.driver.spawn(context, instance, image_meta, [ 2344.312130] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2344.312130] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2344.312130] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2344.312130] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] self._fetch_image_if_missing(context, vi) [ 2344.312130] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2344.312130] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] image_cache(vi, tmp_image_ds_loc) [ 2344.312130] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2344.312487] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] vm_util.copy_virtual_disk( [ 2344.312487] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2344.312487] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] session._wait_for_task(vmdk_copy_task) [ 2344.312487] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2344.312487] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] return self.wait_for_task(task_ref) [ 2344.312487] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2344.312487] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] return evt.wait() [ 2344.312487] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2344.312487] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] result = hub.switch() [ 2344.312487] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2344.312487] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] return self.greenlet.switch() [ 2344.312487] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2344.312487] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] self.f(*self.args, **self.kw) [ 2344.312824] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2344.312824] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] raise exceptions.translate_fault(task_info.error) [ 2344.312824] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2344.312824] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] Faults: ['InvalidArgument'] [ 2344.312824] env[69784]: ERROR nova.compute.manager [instance: 4479a824-1f93-45d0-953f-57736580d86f] [ 2344.312824] env[69784]: DEBUG nova.compute.utils [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2344.314193] env[69784]: DEBUG nova.compute.manager [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Build of instance 4479a824-1f93-45d0-953f-57736580d86f was re-scheduled: A specified parameter was not correct: fileType [ 2344.314193] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2344.314564] env[69784]: DEBUG nova.compute.manager [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2344.314785] env[69784]: DEBUG nova.compute.manager [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2344.314985] env[69784]: DEBUG nova.compute.manager [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2344.315190] env[69784]: DEBUG nova.network.neutron [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2344.642767] env[69784]: DEBUG nova.network.neutron [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2344.654733] env[69784]: INFO nova.compute.manager [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Took 0.34 seconds to deallocate network for instance. [ 2344.746653] env[69784]: INFO nova.scheduler.client.report [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Deleted allocations for instance 4479a824-1f93-45d0-953f-57736580d86f [ 2344.772934] env[69784]: DEBUG oslo_concurrency.lockutils [None req-8a185aa8-8c73-4751-8b85-fc10e9e151ae tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Lock "4479a824-1f93-45d0-953f-57736580d86f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 609.793s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2344.774439] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5dddffaa-5a6b-4969-8685-0ff825ae6ace tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Lock "4479a824-1f93-45d0-953f-57736580d86f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 413.539s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2344.774439] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5dddffaa-5a6b-4969-8685-0ff825ae6ace tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Acquiring lock "4479a824-1f93-45d0-953f-57736580d86f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2344.774439] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5dddffaa-5a6b-4969-8685-0ff825ae6ace tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Lock "4479a824-1f93-45d0-953f-57736580d86f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 
2344.774439] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5dddffaa-5a6b-4969-8685-0ff825ae6ace tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Lock "4479a824-1f93-45d0-953f-57736580d86f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2344.776357] env[69784]: INFO nova.compute.manager [None req-5dddffaa-5a6b-4969-8685-0ff825ae6ace tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Terminating instance [ 2344.778232] env[69784]: DEBUG nova.compute.manager [None req-5dddffaa-5a6b-4969-8685-0ff825ae6ace tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2344.778431] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-5dddffaa-5a6b-4969-8685-0ff825ae6ace tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2344.778803] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-98087b7c-e114-4c9f-b695-ec28735442e0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.789868] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-179b1f01-81af-4465-ad67-09b36512fa03 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.818573] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-5dddffaa-5a6b-4969-8685-0ff825ae6ace tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4479a824-1f93-45d0-953f-57736580d86f could not be found. [ 2344.818781] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-5dddffaa-5a6b-4969-8685-0ff825ae6ace tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2344.819008] env[69784]: INFO nova.compute.manager [None req-5dddffaa-5a6b-4969-8685-0ff825ae6ace tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2344.819287] env[69784]: DEBUG oslo.service.loopingcall [None req-5dddffaa-5a6b-4969-8685-0ff825ae6ace tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2344.819507] env[69784]: DEBUG nova.compute.manager [-] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2344.819604] env[69784]: DEBUG nova.network.neutron [-] [instance: 4479a824-1f93-45d0-953f-57736580d86f] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2344.846381] env[69784]: DEBUG nova.network.neutron [-] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2344.855028] env[69784]: INFO nova.compute.manager [-] [instance: 4479a824-1f93-45d0-953f-57736580d86f] Took 0.04 seconds to deallocate network for instance. [ 2344.942998] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5dddffaa-5a6b-4969-8685-0ff825ae6ace tempest-ServersTestJSON-1328085021 tempest-ServersTestJSON-1328085021-project-member] Lock "4479a824-1f93-45d0-953f-57736580d86f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.170s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2371.840442] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2371.840726] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69784) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2373.840635] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2375.840598] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2376.839637] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2377.839594] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2380.841697] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2380.842117] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Starting heal instance info cache {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2380.842117] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Rebuilding the list of instances to heal {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2380.860154] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2380.860298] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2380.860428] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2380.860556] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2380.860680] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 86a94763-92c8-4689-b37a-3dc6c1ec744c] Skipping network cache update for instance because it is Building. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2380.860804] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: e1969fb1-0f70-42c9-a362-f1efb6ee4619] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2380.860923] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 265a4f1d-8214-42ac-bbd0-4c3758ea6fed] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2380.861153] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 74fe4356-3f52-4f4c-8bad-b065c2c3ac0f] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2380.861311] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Didn't find any instances for network info cache update. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2380.861764] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2380.861944] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2380.874323] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2380.874482] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2380.874632] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2380.874780] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69784) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2380.876543] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c752155c-cd88-463b-9d02-713e7fede5c1 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.885507] env[69784]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41f70dc7-7012-4e52-8c37-8cfefc3b78b9 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.899355] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c177f22-6c92-4d8c-be06-e06a2463fad0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.905465] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04beabeb-c1d8-4cf7-ae74-d876ca598758 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.933702] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180928MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=69784) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2380.933844] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2380.934043] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2380.997562] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance ec925fe0-8f7b-46c0-8d61-6a9cf989e798 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2380.997562] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c78438d5-ddaa-4858-a161-af83e6c16e54 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2380.997562] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 0a177681-5f4e-4dc5-baee-1303be38444a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2380.997562] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance e35ffffb-c0fd-4236-b489-80eb0fdb4e37 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2380.997761] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 86a94763-92c8-4689-b37a-3dc6c1ec744c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2380.997794] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance e1969fb1-0f70-42c9-a362-f1efb6ee4619 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2380.997932] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 265a4f1d-8214-42ac-bbd0-4c3758ea6fed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2380.998021] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 74fe4356-3f52-4f4c-8bad-b065c2c3ac0f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2380.998211] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2380.998349] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2381.091632] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98e0e5f3-a0e1-4048-bece-d6a5631305e9 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.099254] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2229ac26-b2c4-4ccc-8db5-73d59ded6de2 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.129929] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60db56ff-03fe-49d4-be70-4af8c5e1c959 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.137015] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f63bf47-fe3f-488b-a1d9-94599eb462bc {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.149647] env[69784]: DEBUG nova.compute.provider_tree 
[None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2381.157509] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2381.171072] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69784) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2381.171254] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.237s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2382.164611] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2390.969550] env[69784]: WARNING oslo_vmware.rw_handles [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2390.969550] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2390.969550] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2390.969550] env[69784]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2390.969550] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2390.969550] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 2390.969550] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2390.969550] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2390.969550] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2390.969550] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2390.969550] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2390.969550] env[69784]: ERROR oslo_vmware.rw_handles [ 2390.970246] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 
tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/7e0013a5-a969-4eb1-93af-84187626c760/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2390.972273] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2390.972554] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Copying Virtual Disk [datastore1] vmware_temp/7e0013a5-a969-4eb1-93af-84187626c760/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] vmware_temp/7e0013a5-a969-4eb1-93af-84187626c760/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2390.972862] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fa3376aa-cc59-44db-a95e-134e530156eb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2390.981290] env[69784]: DEBUG oslo_vmware.api [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Waiting for the task: (returnval){ [ 2390.981290] env[69784]: value = "task-3467237" [ 2390.981290] env[69784]: _type = "Task" [ 2390.981290] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2390.989060] env[69784]: DEBUG oslo_vmware.api [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Task: {'id': task-3467237, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2391.492079] env[69784]: DEBUG oslo_vmware.exceptions [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Fault InvalidArgument not matched. 
{{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2391.492315] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2391.492891] env[69784]: ERROR nova.compute.manager [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2391.492891] env[69784]: Faults: ['InvalidArgument'] [ 2391.492891] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Traceback (most recent call last): [ 2391.492891] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2391.492891] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] yield resources [ 2391.492891] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2391.492891] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] self.driver.spawn(context, instance, image_meta, [ 2391.492891] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2391.492891] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2391.492891] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2391.492891] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] self._fetch_image_if_missing(context, vi) [ 2391.492891] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2391.493295] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] image_cache(vi, tmp_image_ds_loc) [ 2391.493295] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2391.493295] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] vm_util.copy_virtual_disk( [ 2391.493295] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2391.493295] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] session._wait_for_task(vmdk_copy_task) [ 2391.493295] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 2391.493295] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] return self.wait_for_task(task_ref) [ 2391.493295] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2391.493295] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] return evt.wait() [ 2391.493295] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2391.493295] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] result = hub.switch() [ 2391.493295] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2391.493295] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] return self.greenlet.switch() [ 2391.493657] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2391.493657] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] self.f(*self.args, **self.kw) [ 2391.493657] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2391.493657] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] raise exceptions.translate_fault(task_info.error) [ 2391.493657] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2391.493657] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Faults: ['InvalidArgument'] [ 2391.493657] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] [ 2391.493657] env[69784]: INFO nova.compute.manager [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Terminating instance [ 2391.494716] env[69784]: DEBUG oslo_concurrency.lockutils [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2391.494928] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2391.495178] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-657ffd6d-dba8-4f37-bec9-2423f1295fca {{(pid=69784) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2391.497327] env[69784]: DEBUG nova.compute.manager [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2391.497529] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2391.498263] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e3c6cac-2d8f-4f42-aaff-6f56d374e8fd {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2391.504594] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2391.504810] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ae2f04c5-ab2b-41a7-80ce-4f3f27494032 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2391.506894] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2391.507078] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2391.507994] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03ce041a-3df8-47e6-9f39-faea93266d9e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2391.512287] env[69784]: DEBUG oslo_vmware.api [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Waiting for the task: (returnval){ [ 2391.512287] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52c724ee-d0f5-d766-3aa2-d51f770a42a4" [ 2391.512287] env[69784]: _type = "Task" [ 2391.512287] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2391.519249] env[69784]: DEBUG oslo_vmware.api [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52c724ee-d0f5-d766-3aa2-d51f770a42a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2391.570760] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2391.570980] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2391.571180] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Deleting the datastore file [datastore1] ec925fe0-8f7b-46c0-8d61-6a9cf989e798 {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2391.571436] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-63e0c105-47bf-44bd-a205-3bd83ce0b046 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2391.579499] env[69784]: DEBUG oslo_vmware.api [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Waiting for the task: (returnval){ [ 2391.579499] env[69784]: value = "task-3467239" [ 2391.579499] env[69784]: _type = "Task" [ 2391.579499] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2391.586812] env[69784]: DEBUG oslo_vmware.api [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Task: {'id': task-3467239, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2392.022775] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2392.023177] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Creating directory with path [datastore1] vmware_temp/39107b7f-e660-45e9-a960-f1837766bfde/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2392.023265] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0a847725-2db0-4adf-950a-2aa8feefa53f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2392.035124] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Created directory with path [datastore1] vmware_temp/39107b7f-e660-45e9-a960-f1837766bfde/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2392.035314] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Fetch image to [datastore1] vmware_temp/39107b7f-e660-45e9-a960-f1837766bfde/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2392.035483] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/39107b7f-e660-45e9-a960-f1837766bfde/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2392.036224] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a14fde5-97b0-4444-a9a7-d7e80fe819d0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2392.042638] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30a9fda1-32dc-4fb3-9a69-dc43c97539a1 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2392.051411] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ad897dc-5220-48a9-acd1-4c025333ea06 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2392.083257] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9fd945fb-2c89-4466-b33e-195f03b4223b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2392.090178] env[69784]: DEBUG oslo_vmware.api [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Task: {'id': task-3467239, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072323} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2392.091593] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2392.091883] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2392.092097] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2392.092305] env[69784]: INFO nova.compute.manager [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Took 0.59 seconds to destroy the instance on the hypervisor. 
[ 2392.094766] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-54ca603f-7f1e-48f0-909b-a5b8114fe355 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2392.097501] env[69784]: DEBUG nova.compute.claims [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2392.097776] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2392.098144] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2392.120201] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2392.241427] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f01ce1cf-3670-441a-b208-4ebf5257d8d1 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2392.248794] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fb5c266-2bc8-4670-b380-00472d5c3d6e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2392.252525] env[69784]: DEBUG oslo_vmware.rw_handles [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/39107b7f-e660-45e9-a960-f1837766bfde/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2392.333480] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-259e0f2d-9159-460d-8a62-d7a36dab651e {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2392.337020] env[69784]: DEBUG oslo_vmware.rw_handles [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Completed reading data from the image iterator. {{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2392.337020] env[69784]: DEBUG oslo_vmware.rw_handles [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/39107b7f-e660-45e9-a960-f1837766bfde/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2392.341376] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa471b6-89ee-4171-a4d9-1c1e3f622896 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2392.355591] env[69784]: DEBUG nova.compute.provider_tree [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2392.365667] env[69784]: DEBUG nova.scheduler.client.report [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2392.379910] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.282s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2392.380728] env[69784]: ERROR nova.compute.manager [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2392.380728] env[69784]: Faults: ['InvalidArgument'] [ 
2392.380728] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Traceback (most recent call last): [ 2392.380728] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2392.380728] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] self.driver.spawn(context, instance, image_meta, [ 2392.380728] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2392.380728] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2392.380728] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2392.380728] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] self._fetch_image_if_missing(context, vi) [ 2392.380728] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2392.380728] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] image_cache(vi, tmp_image_ds_loc) [ 2392.380728] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2392.381080] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] vm_util.copy_virtual_disk( [ 2392.381080] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2392.381080] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] session._wait_for_task(vmdk_copy_task) [ 2392.381080] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2392.381080] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] return self.wait_for_task(task_ref) [ 2392.381080] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2392.381080] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] return evt.wait() [ 2392.381080] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2392.381080] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] result = hub.switch() [ 2392.381080] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2392.381080] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] return self.greenlet.switch() [ 2392.381080] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2392.381080] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] self.f(*self.args, **self.kw) [ 2392.381383] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2392.381383] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] raise exceptions.translate_fault(task_info.error) [ 2392.381383] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2392.381383] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Faults: ['InvalidArgument'] [ 2392.381383] env[69784]: ERROR nova.compute.manager [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] [ 2392.381383] env[69784]: DEBUG nova.compute.utils [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2392.382584] env[69784]: DEBUG nova.compute.manager [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Build of instance ec925fe0-8f7b-46c0-8d61-6a9cf989e798 was re-scheduled: A specified parameter was not correct: fileType [ 2392.382584] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2392.382952] env[69784]: DEBUG nova.compute.manager [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2392.383144] env[69784]: DEBUG nova.compute.manager [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2392.383315] env[69784]: DEBUG nova.compute.manager [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2392.383476] env[69784]: DEBUG nova.network.neutron [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2392.690175] env[69784]: DEBUG nova.network.neutron [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2392.701032] env[69784]: INFO nova.compute.manager [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Took 0.32 seconds to deallocate network for instance. [ 2392.799294] env[69784]: INFO nova.scheduler.client.report [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Deleted allocations for instance ec925fe0-8f7b-46c0-8d61-6a9cf989e798 [ 2392.823186] env[69784]: DEBUG oslo_concurrency.lockutils [None req-cde5e228-3ae1-4892-a955-bd9fb6b692de tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "ec925fe0-8f7b-46c0-8d61-6a9cf989e798" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 640.734s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2392.823526] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5a1ceac5-f8f3-4179-944a-0a06683a88a0 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "ec925fe0-8f7b-46c0-8d61-6a9cf989e798" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 444.755s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2392.823745] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5a1ceac5-f8f3-4179-944a-0a06683a88a0 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Acquiring lock "ec925fe0-8f7b-46c0-8d61-6a9cf989e798-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2392.823951] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5a1ceac5-f8f3-4179-944a-0a06683a88a0 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "ec925fe0-8f7b-46c0-8d61-6a9cf989e798-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69784) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2392.824138] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5a1ceac5-f8f3-4179-944a-0a06683a88a0 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "ec925fe0-8f7b-46c0-8d61-6a9cf989e798-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2392.826093] env[69784]: INFO nova.compute.manager [None req-5a1ceac5-f8f3-4179-944a-0a06683a88a0 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Terminating instance [ 2392.828057] env[69784]: DEBUG nova.compute.manager [None req-5a1ceac5-f8f3-4179-944a-0a06683a88a0 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2392.828261] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-5a1ceac5-f8f3-4179-944a-0a06683a88a0 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2392.828778] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3dc2c7b7-86a8-413e-838f-949286ff4772 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2392.837839] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b542480-e6ce-432d-85e9-8c58211edb4b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2392.864151] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-5a1ceac5-f8f3-4179-944a-0a06683a88a0 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ec925fe0-8f7b-46c0-8d61-6a9cf989e798 could not be found. [ 2392.864347] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-5a1ceac5-f8f3-4179-944a-0a06683a88a0 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2392.864522] env[69784]: INFO nova.compute.manager [None req-5a1ceac5-f8f3-4179-944a-0a06683a88a0 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2392.864757] env[69784]: DEBUG oslo.service.loopingcall [None req-5a1ceac5-f8f3-4179-944a-0a06683a88a0 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2392.865212] env[69784]: DEBUG nova.compute.manager [-] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2392.865315] env[69784]: DEBUG nova.network.neutron [-] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2392.888449] env[69784]: DEBUG nova.network.neutron [-] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2392.896226] env[69784]: INFO nova.compute.manager [-] [instance: ec925fe0-8f7b-46c0-8d61-6a9cf989e798] Took 0.03 seconds to deallocate network for instance. [ 2392.978936] env[69784]: DEBUG oslo_concurrency.lockutils [None req-5a1ceac5-f8f3-4179-944a-0a06683a88a0 tempest-DeleteServersTestJSON-422882759 tempest-DeleteServersTestJSON-422882759-project-member] Lock "ec925fe0-8f7b-46c0-8d61-6a9cf989e798" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.155s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2408.577077] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Acquiring lock "077714dd-c77d-40d7-8546-e44eb00e24b3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2408.577408] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Lock "077714dd-c77d-40d7-8546-e44eb00e24b3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2408.587496] env[69784]: DEBUG nova.compute.manager [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 077714dd-c77d-40d7-8546-e44eb00e24b3] Starting instance... 
{{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2408.635758] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2408.636115] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2408.637803] env[69784]: INFO nova.compute.claims [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 077714dd-c77d-40d7-8546-e44eb00e24b3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2408.792321] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54c5cccd-524c-448c-b3bc-05bc8ab3bed7 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.800180] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71e7d796-fda4-4370-99b6-1044658b0bb7 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.829505] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8b83b0b-14ce-43bc-a557-21616cf324a8 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.837042] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2135bb13-ee27-49e3-83a5-759c32b18eb0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.850595] env[69784]: DEBUG nova.compute.provider_tree [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2408.859159] env[69784]: DEBUG nova.scheduler.client.report [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2408.871635] env[69784]: DEBUG oslo_concurrency.lockutils 
[None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.236s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2408.872090] env[69784]: DEBUG nova.compute.manager [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 077714dd-c77d-40d7-8546-e44eb00e24b3] Start building networks asynchronously for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2408.911420] env[69784]: DEBUG nova.compute.utils [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Using /dev/sd instead of None {{(pid=69784) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2408.912577] env[69784]: DEBUG nova.compute.manager [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 077714dd-c77d-40d7-8546-e44eb00e24b3] Allocating IP information in the background. {{(pid=69784) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 2408.912743] env[69784]: DEBUG nova.network.neutron [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 077714dd-c77d-40d7-8546-e44eb00e24b3] allocate_for_instance() {{(pid=69784) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2408.920755] env[69784]: DEBUG nova.compute.manager [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 077714dd-c77d-40d7-8546-e44eb00e24b3] Start building block device mappings for instance. {{(pid=69784) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2408.968703] env[69784]: DEBUG nova.policy [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ed425f078af549e9b7a61cf53066bb1e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2a9ca6c244dd4ce59b9938547e24c7db', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69784) authorize /opt/stack/nova/nova/policy.py:203}} [ 2408.985271] env[69784]: DEBUG nova.compute.manager [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 077714dd-c77d-40d7-8546-e44eb00e24b3] Start spawning the instance on the hypervisor. 
{{(pid=69784) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2409.009264] env[69784]: DEBUG nova.virt.hardware [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:58:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:58:33Z,direct_url=,disk_format='vmdk',id=a83f2316-67d7-4612-bb03-1146b6453ed4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7b0f6ea1d8724018ae13e62fe7220317',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:58:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2409.009512] env[69784]: DEBUG nova.virt.hardware [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Flavor limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2409.009670] env[69784]: DEBUG nova.virt.hardware [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Image limits 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2409.009847] env[69784]: DEBUG nova.virt.hardware [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Flavor pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2409.009993] env[69784]: DEBUG nova.virt.hardware [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Image pref 0:0:0 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2409.010183] env[69784]: DEBUG nova.virt.hardware [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69784) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2409.010396] env[69784]: DEBUG nova.virt.hardware [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2409.010556] env[69784]: DEBUG nova.virt.hardware [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2409.010805] env[69784]: DEBUG 
nova.virt.hardware [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Got 1 possible topologies {{(pid=69784) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2409.010886] env[69784]: DEBUG nova.virt.hardware [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2409.011073] env[69784]: DEBUG nova.virt.hardware [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69784) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2409.011911] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb51ed49-25d9-483e-88fd-585baf48ff81 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.019616] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd3efeef-784a-487b-8470-c7b07f0b4c9c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.510767] env[69784]: DEBUG nova.network.neutron [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 077714dd-c77d-40d7-8546-e44eb00e24b3] Successfully created port: fb3f437d-8e32-48eb-b67a-4a9a2e6575a7 {{(pid=69784) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2410.050018] env[69784]: DEBUG nova.compute.manager [req-241809d5-40e8-4849-a004-f6f041e90015 req-6ddb46fb-35cc-46aa-b6d6-1bb3c9b525fe service nova] [instance: 077714dd-c77d-40d7-8546-e44eb00e24b3] Received event network-vif-plugged-fb3f437d-8e32-48eb-b67a-4a9a2e6575a7 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2410.050314] env[69784]: DEBUG oslo_concurrency.lockutils [req-241809d5-40e8-4849-a004-f6f041e90015 req-6ddb46fb-35cc-46aa-b6d6-1bb3c9b525fe service nova] Acquiring lock "077714dd-c77d-40d7-8546-e44eb00e24b3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2410.050463] env[69784]: DEBUG oslo_concurrency.lockutils [req-241809d5-40e8-4849-a004-f6f041e90015 req-6ddb46fb-35cc-46aa-b6d6-1bb3c9b525fe service nova] Lock "077714dd-c77d-40d7-8546-e44eb00e24b3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2410.050630] env[69784]: DEBUG oslo_concurrency.lockutils [req-241809d5-40e8-4849-a004-f6f041e90015 req-6ddb46fb-35cc-46aa-b6d6-1bb3c9b525fe service nova] Lock "077714dd-c77d-40d7-8546-e44eb00e24b3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2410.050794] env[69784]: DEBUG 
nova.compute.manager [req-241809d5-40e8-4849-a004-f6f041e90015 req-6ddb46fb-35cc-46aa-b6d6-1bb3c9b525fe service nova] [instance: 077714dd-c77d-40d7-8546-e44eb00e24b3] No waiting events found dispatching network-vif-plugged-fb3f437d-8e32-48eb-b67a-4a9a2e6575a7 {{(pid=69784) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2410.050954] env[69784]: WARNING nova.compute.manager [req-241809d5-40e8-4849-a004-f6f041e90015 req-6ddb46fb-35cc-46aa-b6d6-1bb3c9b525fe service nova] [instance: 077714dd-c77d-40d7-8546-e44eb00e24b3] Received unexpected event network-vif-plugged-fb3f437d-8e32-48eb-b67a-4a9a2e6575a7 for instance with vm_state building and task_state spawning. [ 2410.129373] env[69784]: DEBUG nova.network.neutron [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 077714dd-c77d-40d7-8546-e44eb00e24b3] Successfully updated port: fb3f437d-8e32-48eb-b67a-4a9a2e6575a7 {{(pid=69784) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2410.140805] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Acquiring lock "refresh_cache-077714dd-c77d-40d7-8546-e44eb00e24b3" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2410.140955] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Acquired lock "refresh_cache-077714dd-c77d-40d7-8546-e44eb00e24b3" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2410.141129] env[69784]: DEBUG nova.network.neutron [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 077714dd-c77d-40d7-8546-e44eb00e24b3] Building network info cache for instance {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2410.179605] env[69784]: DEBUG nova.network.neutron [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 077714dd-c77d-40d7-8546-e44eb00e24b3] Instance cache missing network info. 
{{(pid=69784) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2410.332189] env[69784]: DEBUG nova.network.neutron [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 077714dd-c77d-40d7-8546-e44eb00e24b3] Updating instance_info_cache with network_info: [{"id": "fb3f437d-8e32-48eb-b67a-4a9a2e6575a7", "address": "fa:16:3e:c7:c8:fd", "network": {"id": "c5a63063-6b00-4337-986e-4580b1869bc6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-676037584-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a9ca6c244dd4ce59b9938547e24c7db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb3f437d-8e", "ovs_interfaceid": "fb3f437d-8e32-48eb-b67a-4a9a2e6575a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2410.344698] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Releasing lock "refresh_cache-077714dd-c77d-40d7-8546-e44eb00e24b3" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2410.344990] env[69784]: DEBUG nova.compute.manager [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 077714dd-c77d-40d7-8546-e44eb00e24b3] Instance network_info: |[{"id": "fb3f437d-8e32-48eb-b67a-4a9a2e6575a7", "address": "fa:16:3e:c7:c8:fd", "network": {"id": "c5a63063-6b00-4337-986e-4580b1869bc6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-676037584-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a9ca6c244dd4ce59b9938547e24c7db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb3f437d-8e", "ovs_interfaceid": "fb3f437d-8e32-48eb-b67a-4a9a2e6575a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69784) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1971}} [ 2410.345395] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 077714dd-c77d-40d7-8546-e44eb00e24b3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c7:c8:fd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e1a5c1-4ae7-409b-8de7-d401684ef60d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fb3f437d-8e32-48eb-b67a-4a9a2e6575a7', 'vif_model': 'vmxnet3'}] {{(pid=69784) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2410.352834] env[69784]: DEBUG oslo.service.loopingcall [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2410.353656] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 077714dd-c77d-40d7-8546-e44eb00e24b3] Creating VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2410.353914] env[69784]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6bf4e645-f973-4289-939c-68603695fca0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.373382] env[69784]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2410.373382] env[69784]: value = "task-3467240" [ 2410.373382] env[69784]: _type = "Task" [ 2410.373382] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2410.380819] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467240, 'name': CreateVM_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2410.884177] env[69784]: DEBUG oslo_vmware.api [-] Task: {'id': task-3467240, 'name': CreateVM_Task, 'duration_secs': 0.282248} completed successfully. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2410.884330] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 077714dd-c77d-40d7-8546-e44eb00e24b3] Created VM on the ESX host {{(pid=69784) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2410.885070] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2410.885246] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2410.885559] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2410.885803] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06365a1a-e0d0-40ad-bd08-777d17c84232 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.890043] env[69784]: DEBUG oslo_vmware.api [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Waiting for the task: (returnval){ [ 2410.890043] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]52f518ee-f751-c2d9-2ac5-79b4e640c504" [ 2410.890043] env[69784]: _type = "Task" [ 2410.890043] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2410.900344] env[69784]: DEBUG oslo_vmware.api [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]52f518ee-f751-c2d9-2ac5-79b4e640c504, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2411.400775] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2411.401178] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: 077714dd-c77d-40d7-8546-e44eb00e24b3] Processing image a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2411.401242] env[69784]: DEBUG oslo_concurrency.lockutils [None req-1e01f542-4277-4463-8d59-c8fea4950a1d tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2412.082517] env[69784]: DEBUG nova.compute.manager [req-45c3f68b-f926-4e7c-8add-62035c89f7b6 req-4d9302a7-9e14-4e38-a110-352cb5672840 service nova] [instance: 077714dd-c77d-40d7-8546-e44eb00e24b3] Received event network-changed-fb3f437d-8e32-48eb-b67a-4a9a2e6575a7 {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2412.082738] env[69784]: DEBUG nova.compute.manager [req-45c3f68b-f926-4e7c-8add-62035c89f7b6 req-4d9302a7-9e14-4e38-a110-352cb5672840 service nova] [instance: 077714dd-c77d-40d7-8546-e44eb00e24b3] Refreshing instance network info cache due to event network-changed-fb3f437d-8e32-48eb-b67a-4a9a2e6575a7. {{(pid=69784) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2412.082908] env[69784]: DEBUG oslo_concurrency.lockutils [req-45c3f68b-f926-4e7c-8add-62035c89f7b6 req-4d9302a7-9e14-4e38-a110-352cb5672840 service nova] Acquiring lock "refresh_cache-077714dd-c77d-40d7-8546-e44eb00e24b3" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2412.083168] env[69784]: DEBUG oslo_concurrency.lockutils [req-45c3f68b-f926-4e7c-8add-62035c89f7b6 req-4d9302a7-9e14-4e38-a110-352cb5672840 service nova] Acquired lock "refresh_cache-077714dd-c77d-40d7-8546-e44eb00e24b3" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2412.083233] env[69784]: DEBUG nova.network.neutron [req-45c3f68b-f926-4e7c-8add-62035c89f7b6 req-4d9302a7-9e14-4e38-a110-352cb5672840 service nova] [instance: 077714dd-c77d-40d7-8546-e44eb00e24b3] Refreshing network info cache for port fb3f437d-8e32-48eb-b67a-4a9a2e6575a7 {{(pid=69784) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2412.366139] env[69784]: DEBUG nova.network.neutron [req-45c3f68b-f926-4e7c-8add-62035c89f7b6 req-4d9302a7-9e14-4e38-a110-352cb5672840 service nova] [instance: 077714dd-c77d-40d7-8546-e44eb00e24b3] Updated VIF entry in instance network info cache for port fb3f437d-8e32-48eb-b67a-4a9a2e6575a7. 
{{(pid=69784) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2412.366514] env[69784]: DEBUG nova.network.neutron [req-45c3f68b-f926-4e7c-8add-62035c89f7b6 req-4d9302a7-9e14-4e38-a110-352cb5672840 service nova] [instance: 077714dd-c77d-40d7-8546-e44eb00e24b3] Updating instance_info_cache with network_info: [{"id": "fb3f437d-8e32-48eb-b67a-4a9a2e6575a7", "address": "fa:16:3e:c7:c8:fd", "network": {"id": "c5a63063-6b00-4337-986e-4580b1869bc6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-676037584-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a9ca6c244dd4ce59b9938547e24c7db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb3f437d-8e", "ovs_interfaceid": "fb3f437d-8e32-48eb-b67a-4a9a2e6575a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2412.376235] env[69784]: DEBUG oslo_concurrency.lockutils [req-45c3f68b-f926-4e7c-8add-62035c89f7b6 req-4d9302a7-9e14-4e38-a110-352cb5672840 service nova] Releasing lock "refresh_cache-077714dd-c77d-40d7-8546-e44eb00e24b3" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2432.839666] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2432.840093] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69784) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2433.839663] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2435.839875] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2437.841296] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2439.839747] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2440.840645] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2441.136022] env[69784]: WARNING oslo_vmware.rw_handles [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2441.136022] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2441.136022] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2441.136022] env[69784]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2441.136022] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2441.136022] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 2441.136022] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2441.136022] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2441.136022] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2441.136022] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2441.136022] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2441.136022] env[69784]: ERROR oslo_vmware.rw_handles [ 2441.136022] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/39107b7f-e660-45e9-a960-f1837766bfde/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store 
datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2441.137221] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2441.137608] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Copying Virtual Disk [datastore1] vmware_temp/39107b7f-e660-45e9-a960-f1837766bfde/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] vmware_temp/39107b7f-e660-45e9-a960-f1837766bfde/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2441.138058] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5f5ca37b-9a10-44fe-8a42-88a0242c6aa0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2441.149330] env[69784]: DEBUG oslo_vmware.api [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Waiting for the task: (returnval){ [ 2441.149330] env[69784]: value = "task-3467241" [ 2441.149330] env[69784]: _type = "Task" [ 2441.149330] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2441.157617] env[69784]: DEBUG oslo_vmware.api [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Task: {'id': task-3467241, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2441.660324] env[69784]: DEBUG oslo_vmware.exceptions [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Fault InvalidArgument not matched. 
{{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2441.660676] env[69784]: DEBUG oslo_concurrency.lockutils [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2441.661318] env[69784]: ERROR nova.compute.manager [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2441.661318] env[69784]: Faults: ['InvalidArgument'] [ 2441.661318] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Traceback (most recent call last): [ 2441.661318] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2441.661318] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] yield resources [ 2441.661318] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2441.661318] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] self.driver.spawn(context, instance, image_meta, [ 2441.661318] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2441.661318] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2441.661318] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2441.661318] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] self._fetch_image_if_missing(context, vi) [ 2441.661318] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2441.661657] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] image_cache(vi, tmp_image_ds_loc) [ 2441.661657] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2441.661657] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] vm_util.copy_virtual_disk( [ 2441.661657] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2441.661657] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] session._wait_for_task(vmdk_copy_task) [ 2441.661657] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2441.661657] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] return self.wait_for_task(task_ref) [ 2441.661657] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2441.661657] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] return evt.wait() [ 2441.661657] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2441.661657] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] result = hub.switch() [ 2441.661657] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2441.661657] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] return self.greenlet.switch() [ 2441.662066] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2441.662066] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] self.f(*self.args, **self.kw) [ 2441.662066] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2441.662066] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] raise exceptions.translate_fault(task_info.error) [ 2441.662066] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2441.662066] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Faults: ['InvalidArgument'] [ 2441.662066] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] [ 2441.662066] env[69784]: INFO nova.compute.manager [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Terminating instance [ 2441.663209] env[69784]: DEBUG oslo_concurrency.lockutils [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2441.663414] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2441.663653] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-febec324-9f8a-4a2b-8331-fd34a50e780a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2441.665997] env[69784]: DEBUG nova.compute.manager [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2441.666213] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2441.666948] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d596754-73f3-4f6e-bf5e-cb6008dd0405 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2441.673875] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2441.674893] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cacedaed-df9c-4015-ba78-7f259acbf402 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2441.676237] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2441.676429] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2441.677141] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69902b8f-400b-4cb2-b7e0-f1b0e40437d4 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2441.682009] env[69784]: DEBUG oslo_vmware.api [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Waiting for the task: (returnval){ [ 2441.682009] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]5200f697-1260-4203-9471-64e5c2e2e187" [ 2441.682009] env[69784]: _type = "Task" [ 2441.682009] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2441.690665] env[69784]: DEBUG oslo_vmware.api [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]5200f697-1260-4203-9471-64e5c2e2e187, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2441.738428] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2441.738637] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2441.738812] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Deleting the datastore file [datastore1] c78438d5-ddaa-4858-a161-af83e6c16e54 {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2441.739080] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-45b66335-259b-43c3-a714-f5388510f1cd {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2441.745441] env[69784]: DEBUG oslo_vmware.api [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Waiting for the task: (returnval){ [ 2441.745441] env[69784]: value = "task-3467243" [ 2441.745441] env[69784]: _type = "Task" [ 2441.745441] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2441.752625] env[69784]: DEBUG oslo_vmware.api [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Task: {'id': task-3467243, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2441.839347] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2441.839524] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Starting heal instance info cache {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2441.839630] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Rebuilding the list of instances to heal {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2441.859459] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2441.859778] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2441.859778] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2441.859853] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 86a94763-92c8-4689-b37a-3dc6c1ec744c] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2441.859964] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: e1969fb1-0f70-42c9-a362-f1efb6ee4619] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2441.860093] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 265a4f1d-8214-42ac-bbd0-4c3758ea6fed] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2441.860214] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 74fe4356-3f52-4f4c-8bad-b065c2c3ac0f] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2441.860330] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 077714dd-c77d-40d7-8546-e44eb00e24b3] Skipping network cache update for instance because it is Building. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2441.860448] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Didn't find any instances for network info cache update. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2441.860918] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2441.871738] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2441.871944] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2441.872123] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2441.872271] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69784) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2441.873280] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5355d06-c663-40eb-943e-a0e242c037bf {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2441.882541] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95288992-1b2d-488a-b8b5-364399bca48b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2441.895780] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57928ec3-f56b-4ae7-a954-bf5590549d67 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2441.901564] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-982dcb8f-16f5-4327-b354-e36f47105200 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2441.929453] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180944MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=69784) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2441.929580] env[69784]: DEBUG oslo_concurrency.lockutils [None 
req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2441.929768] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2442.026103] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance c78438d5-ddaa-4858-a161-af83e6c16e54 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2442.026278] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 0a177681-5f4e-4dc5-baee-1303be38444a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2442.026460] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance e35ffffb-c0fd-4236-b489-80eb0fdb4e37 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2442.026588] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 86a94763-92c8-4689-b37a-3dc6c1ec744c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2442.026708] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance e1969fb1-0f70-42c9-a362-f1efb6ee4619 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2442.026825] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 265a4f1d-8214-42ac-bbd0-4c3758ea6fed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2442.026941] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 74fe4356-3f52-4f4c-8bad-b065c2c3ac0f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2442.027071] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 077714dd-c77d-40d7-8546-e44eb00e24b3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2442.027311] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2442.027511] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2442.043971] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Refreshing inventories for resource provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2442.056678] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Updating ProviderTree inventory for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 2442.056850] env[69784]: DEBUG nova.compute.provider_tree [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Updating inventory in ProviderTree for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2442.067589] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Refreshing aggregate associations for resource provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3, aggregates: None {{(pid=69784) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 2442.085779] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Refreshing trait associations for resource provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69784) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:825}} [ 2442.180931] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa1cd43b-1836-41aa-87f1-5afe1a9adc9c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2442.193774] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd1bda06-2e51-4f5d-af83-86f9cc347c13 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2442.196967] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2442.197204] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Creating directory with path [datastore1] vmware_temp/d920619b-241c-4012-bccc-4433bd7ef457/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2442.197688] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d9ef2397-5521-46fd-a399-815831a2e653 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2442.224910] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be0a78b1-492d-4f90-a37a-e5626692aca2 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2442.227219] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Created directory with path [datastore1] vmware_temp/d920619b-241c-4012-bccc-4433bd7ef457/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2442.227407] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Fetch image to [datastore1] vmware_temp/d920619b-241c-4012-bccc-4433bd7ef457/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2442.227569] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/d920619b-241c-4012-bccc-4433bd7ef457/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2442.228227] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cc39cda-13d3-42dc-b30a-a5c48c0b9653 {{(pid=69784) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2442.237126] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-358b5570-f5df-4216-a9ee-61614c825db7 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2442.241472] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-377ab4d1-949f-4303-8475-b8b31e520310 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2442.261119] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-690a0f1e-c874-4958-90fb-1eb03511a7ca {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2442.265105] env[69784]: DEBUG nova.compute.provider_tree [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2442.271045] env[69784]: DEBUG oslo_vmware.api [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Task: {'id': task-3467243, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070044} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2442.295014] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2442.295211] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2442.295404] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2442.295616] env[69784]: INFO nova.compute.manager [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Took 0.63 seconds to destroy the instance on the hypervisor. 
[ 2442.298227] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf062941-8ce8-4615-a1c4-e8c475285248 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2442.301159] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2442.303965] env[69784]: DEBUG nova.compute.claims [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2442.304163] env[69784]: DEBUG oslo_concurrency.lockutils [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2442.308310] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f8289197-1d39-41e2-8b2c-53484615c70d {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2442.318431] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69784) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2442.318616] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.389s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2442.318847] env[69784]: DEBUG oslo_concurrency.lockutils [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.015s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2442.329813] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2442.440131] env[69784]: DEBUG 
oslo_vmware.rw_handles [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d920619b-241c-4012-bccc-4433bd7ef457/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2442.500016] env[69784]: DEBUG oslo_vmware.rw_handles [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Completed reading data from the image iterator. {{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2442.500204] env[69784]: DEBUG oslo_vmware.rw_handles [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d920619b-241c-4012-bccc-4433bd7ef457/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2442.540465] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d99d0ef8-9c21-4617-a6a9-321eec045854 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2442.548026] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7ee485c-9039-46f5-8dbe-5722b2d729fb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2442.577037] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac14c0df-fd85-4283-bbfc-0cb9dc61d1d0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2442.583543] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69bf2da0-cc3b-4bab-9e38-3a4520277712 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2442.595960] env[69784]: DEBUG nova.compute.provider_tree [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2442.604737] env[69784]: DEBUG nova.scheduler.client.report [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 
'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2442.617533] env[69784]: DEBUG oslo_concurrency.lockutils [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.299s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2442.618090] env[69784]: ERROR nova.compute.manager [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2442.618090] env[69784]: Faults: ['InvalidArgument'] [ 2442.618090] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Traceback (most recent call last): [ 2442.618090] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2442.618090] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] self.driver.spawn(context, instance, image_meta, [ 2442.618090] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2442.618090] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2442.618090] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2442.618090] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] self._fetch_image_if_missing(context, vi) [ 2442.618090] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2442.618090] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] image_cache(vi, tmp_image_ds_loc) [ 2442.618090] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2442.618521] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] vm_util.copy_virtual_disk( [ 2442.618521] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2442.618521] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] session._wait_for_task(vmdk_copy_task) [ 2442.618521] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2442.618521] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] return self.wait_for_task(task_ref) [ 2442.618521] env[69784]: ERROR nova.compute.manager [instance: 
c78438d5-ddaa-4858-a161-af83e6c16e54] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2442.618521] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] return evt.wait() [ 2442.618521] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2442.618521] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] result = hub.switch() [ 2442.618521] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2442.618521] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] return self.greenlet.switch() [ 2442.618521] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2442.618521] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] self.f(*self.args, **self.kw) [ 2442.618881] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2442.618881] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] raise exceptions.translate_fault(task_info.error) [ 2442.618881] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2442.618881] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Faults: ['InvalidArgument'] [ 2442.618881] env[69784]: ERROR nova.compute.manager [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] [ 2442.618881] env[69784]: DEBUG nova.compute.utils [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2442.620045] env[69784]: DEBUG nova.compute.manager [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Build of instance c78438d5-ddaa-4858-a161-af83e6c16e54 was re-scheduled: A specified parameter was not correct: fileType [ 2442.620045] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2442.620422] env[69784]: DEBUG nova.compute.manager [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2442.620607] env[69784]: DEBUG nova.compute.manager [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Virt driver does not provide 
unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2442.620832] env[69784]: DEBUG nova.compute.manager [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2442.621017] env[69784]: DEBUG nova.network.neutron [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2442.920953] env[69784]: DEBUG nova.network.neutron [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2442.930918] env[69784]: INFO nova.compute.manager [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Took 0.31 seconds to deallocate network for instance. [ 2443.027552] env[69784]: INFO nova.scheduler.client.report [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Deleted allocations for instance c78438d5-ddaa-4858-a161-af83e6c16e54 [ 2443.050008] env[69784]: DEBUG oslo_concurrency.lockutils [None req-825654d8-d43e-4fba-82b1-2a8787819e13 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Lock "c78438d5-ddaa-4858-a161-af83e6c16e54" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 577.976s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2443.050008] env[69784]: DEBUG oslo_concurrency.lockutils [None req-128fdb63-9326-4efa-84d1-43e3fd06a8f5 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Lock "c78438d5-ddaa-4858-a161-af83e6c16e54" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 382.532s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2443.050185] env[69784]: DEBUG oslo_concurrency.lockutils [None req-128fdb63-9326-4efa-84d1-43e3fd06a8f5 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Acquiring lock "c78438d5-ddaa-4858-a161-af83e6c16e54-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2443.050375] env[69784]: DEBUG oslo_concurrency.lockutils [None req-128fdb63-9326-4efa-84d1-43e3fd06a8f5 tempest-AttachVolumeShelveTestJSON-1306079969 
tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Lock "c78438d5-ddaa-4858-a161-af83e6c16e54-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2443.050536] env[69784]: DEBUG oslo_concurrency.lockutils [None req-128fdb63-9326-4efa-84d1-43e3fd06a8f5 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Lock "c78438d5-ddaa-4858-a161-af83e6c16e54-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2443.052439] env[69784]: INFO nova.compute.manager [None req-128fdb63-9326-4efa-84d1-43e3fd06a8f5 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Terminating instance [ 2443.054589] env[69784]: DEBUG nova.compute.manager [None req-128fdb63-9326-4efa-84d1-43e3fd06a8f5 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2443.054589] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-128fdb63-9326-4efa-84d1-43e3fd06a8f5 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2443.054827] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-842b8b3b-3809-43bb-87e6-bd20e757fb45 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2443.064242] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0ab0fef-4fdd-48b8-b2e7-da8edf2ddcba {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2443.091497] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-128fdb63-9326-4efa-84d1-43e3fd06a8f5 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c78438d5-ddaa-4858-a161-af83e6c16e54 could not be found. [ 2443.091736] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-128fdb63-9326-4efa-84d1-43e3fd06a8f5 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2443.091868] env[69784]: INFO nova.compute.manager [None req-128fdb63-9326-4efa-84d1-43e3fd06a8f5 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Took 0.04 seconds to destroy the instance on the hypervisor. 
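The lock lines above ("Acquiring lock ... by ...", "acquired ... waited", "released ... held") come from oslo_concurrency's lockutils wrapper: callers are serialised on a named lock, here the instance UUID and its "-events" companion, and the wrapper logs how long each caller queued for the lock and how long it held it. A minimal sketch of that pattern follows; the lock name mirrors the per-instance UUID in the log, and the function body is illustrative rather than Nova's actual terminate handler.

import time

from oslo_concurrency import lockutils


@lockutils.synchronized('c78438d5-ddaa-4858-a161-af83e6c16e54')
def do_terminate_instance():
    # Work done while the named lock is held; the "held N.NNNs" figure in the
    # log measures this span, and "waited N.NNNs" measures time spent queued.
    time.sleep(0.1)


do_terminate_instance()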
[ 2443.092118] env[69784]: DEBUG oslo.service.loopingcall [None req-128fdb63-9326-4efa-84d1-43e3fd06a8f5 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2443.092360] env[69784]: DEBUG nova.compute.manager [-] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2443.092456] env[69784]: DEBUG nova.network.neutron [-] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2443.122029] env[69784]: DEBUG nova.network.neutron [-] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2443.129519] env[69784]: INFO nova.compute.manager [-] [instance: c78438d5-ddaa-4858-a161-af83e6c16e54] Took 0.04 seconds to deallocate network for instance. [ 2443.219818] env[69784]: DEBUG oslo_concurrency.lockutils [None req-128fdb63-9326-4efa-84d1-43e3fd06a8f5 tempest-AttachVolumeShelveTestJSON-1306079969 tempest-AttachVolumeShelveTestJSON-1306079969-project-member] Lock "c78438d5-ddaa-4858-a161-af83e6c16e54" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.170s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2444.316136] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2447.840991] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2447.841266] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Cleaning up deleted instances with incomplete migration {{(pid=69784) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11236}} [ 2450.844648] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2450.864620] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2450.864781] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Cleaning up deleted instances {{(pid=69784) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11198}} [ 2450.872525] env[69784]: DEBUG nova.compute.manager [None 
req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] There are 0 instances to clean {{(pid=69784) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11207}} [ 2452.839750] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2473.466721] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2473.467228] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Getting list of instances from cluster (obj){ [ 2473.467228] env[69784]: value = "domain-c8" [ 2473.467228] env[69784]: _type = "ClusterComputeResource" [ 2473.467228] env[69784]: } {{(pid=69784) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2473.468159] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6873dbcd-1cb1-4c50-9ffd-1469a4efa527 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2474.261605] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Got total of 7 instances {{(pid=69784) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2482.348773] env[69784]: DEBUG oslo_concurrency.lockutils [None req-c1b0ddef-a2eb-4221-ae38-4301e53ae6eb tempest-ServerRescueNegativeTestJSON-1927659967 tempest-ServerRescueNegativeTestJSON-1927659967-project-member] Acquiring lock "74fe4356-3f52-4f4c-8bad-b065c2c3ac0f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2487.950156] env[69784]: WARNING oslo_vmware.rw_handles [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2487.950156] env[69784]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2487.950156] env[69784]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2487.950156] env[69784]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2487.950156] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2487.950156] env[69784]: ERROR oslo_vmware.rw_handles response.begin() [ 2487.950156] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2487.950156] env[69784]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2487.950156] env[69784]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2487.950156] env[69784]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2487.950156] env[69784]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end 
closed connection without response [ 2487.950156] env[69784]: ERROR oslo_vmware.rw_handles [ 2487.950750] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Downloaded image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to vmware_temp/d920619b-241c-4012-bccc-4433bd7ef457/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2487.952758] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Caching image {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2487.953015] env[69784]: DEBUG nova.virt.vmwareapi.vm_util [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Copying Virtual Disk [datastore1] vmware_temp/d920619b-241c-4012-bccc-4433bd7ef457/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk to [datastore1] vmware_temp/d920619b-241c-4012-bccc-4433bd7ef457/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk {{(pid=69784) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2487.953296] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0d2b4429-0dda-4fae-a7f6-57e86e4c8cbb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2487.961206] env[69784]: DEBUG oslo_vmware.api [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Waiting for the task: (returnval){ [ 2487.961206] env[69784]: value = "task-3467244" [ 2487.961206] env[69784]: _type = "Task" [ 2487.961206] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2487.969375] env[69784]: DEBUG oslo_vmware.api [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Task: {'id': task-3467244, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2488.471498] env[69784]: DEBUG oslo_vmware.exceptions [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Fault InvalidArgument not matched. 
{{(pid=69784) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2488.471770] env[69784]: DEBUG oslo_concurrency.lockutils [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2488.472334] env[69784]: ERROR nova.compute.manager [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2488.472334] env[69784]: Faults: ['InvalidArgument'] [ 2488.472334] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Traceback (most recent call last): [ 2488.472334] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2488.472334] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] yield resources [ 2488.472334] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2488.472334] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] self.driver.spawn(context, instance, image_meta, [ 2488.472334] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2488.472334] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2488.472334] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2488.472334] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] self._fetch_image_if_missing(context, vi) [ 2488.472334] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2488.472889] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] image_cache(vi, tmp_image_ds_loc) [ 2488.472889] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2488.472889] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] vm_util.copy_virtual_disk( [ 2488.472889] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2488.472889] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] session._wait_for_task(vmdk_copy_task) [ 2488.472889] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2488.472889] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] return self.wait_for_task(task_ref) [ 2488.472889] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2488.472889] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] return evt.wait() [ 2488.472889] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2488.472889] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] result = hub.switch() [ 2488.472889] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2488.472889] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] return self.greenlet.switch() [ 2488.473255] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2488.473255] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] self.f(*self.args, **self.kw) [ 2488.473255] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2488.473255] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] raise exceptions.translate_fault(task_info.error) [ 2488.473255] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2488.473255] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Faults: ['InvalidArgument'] [ 2488.473255] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] [ 2488.473255] env[69784]: INFO nova.compute.manager [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Terminating instance [ 2488.474134] env[69784]: DEBUG oslo_concurrency.lockutils [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a83f2316-67d7-4612-bb03-1146b6453ed4/a83f2316-67d7-4612-bb03-1146b6453ed4.vmdk" {{(pid=69784) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2488.474348] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2488.474577] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-55a6f189-2ed9-43e8-9865-97fdc4ad37f7 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2488.476698] env[69784]: DEBUG nova.compute.manager [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2488.476921] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2488.477640] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1498a8d-3728-4357-8894-0a06ddf17b7c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2488.484148] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Unregistering the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2488.484354] env[69784]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d54d8d7a-a42e-485a-ab55-1af8b2bdaae0 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2488.486434] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2488.486604] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69784) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2488.487539] env[69784]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36816dd3-5bcf-4728-8a61-9fa1ca3542e7 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2488.492086] env[69784]: DEBUG oslo_vmware.api [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Waiting for the task: (returnval){ [ 2488.492086] env[69784]: value = "session[522103cb-1b10-2bdc-db46-04ef996e3e77]529a15e3-9ec2-5d3b-bff0-4571778611bb" [ 2488.492086] env[69784]: _type = "Task" [ 2488.492086] env[69784]: } to complete. 
{{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2488.499100] env[69784]: DEBUG oslo_vmware.api [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Task: {'id': session[522103cb-1b10-2bdc-db46-04ef996e3e77]529a15e3-9ec2-5d3b-bff0-4571778611bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2488.555619] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Unregistered the VM {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2488.555871] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Deleting contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2488.556066] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Deleting the datastore file [datastore1] 0a177681-5f4e-4dc5-baee-1303be38444a {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2488.556345] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-96235e84-2b5e-4a6c-8668-2a804d7d4156 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2488.562679] env[69784]: DEBUG oslo_vmware.api [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Waiting for the task: (returnval){ [ 2488.562679] env[69784]: value = "task-3467246" [ 2488.562679] env[69784]: _type = "Task" [ 2488.562679] env[69784]: } to complete. {{(pid=69784) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2488.569939] env[69784]: DEBUG oslo_vmware.api [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Task: {'id': task-3467246, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2489.002157] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] Preparing fetch location {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2489.002434] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Creating directory with path [datastore1] vmware_temp/d0f1276c-4c3d-40c5-adc2-b51f5554c934/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2489.002647] env[69784]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5d38bb6b-b9cc-469c-b41a-bbc12d346988 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2489.013521] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Created directory with path [datastore1] vmware_temp/d0f1276c-4c3d-40c5-adc2-b51f5554c934/a83f2316-67d7-4612-bb03-1146b6453ed4 {{(pid=69784) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2489.013705] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] Fetch image to [datastore1] vmware_temp/d0f1276c-4c3d-40c5-adc2-b51f5554c934/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk {{(pid=69784) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2489.013873] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to [datastore1] vmware_temp/d0f1276c-4c3d-40c5-adc2-b51f5554c934/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk on the data store datastore1 {{(pid=69784) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2489.014589] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee046e33-0803-4e35-8402-72d2c1673341 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2489.020930] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7626a55-ba42-4c48-bf0b-3a0720b77c6a {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2489.029755] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bdb4d2f-d2d1-4e33-ae7c-5f7026e4ce66 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2489.060359] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-71dd7225-34a1-48f3-84a7-5527dce75270 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2489.068366] env[69784]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-985e323e-ff48-4a56-8b37-fa901fe87bac {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2489.072438] env[69784]: DEBUG oslo_vmware.api [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Task: {'id': task-3467246, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081497} completed successfully. {{(pid=69784) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2489.072939] env[69784]: DEBUG nova.virt.vmwareapi.ds_util [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Deleted the datastore file {{(pid=69784) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2489.073145] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Deleted contents of the VM from datastore datastore1 {{(pid=69784) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2489.073312] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2489.073483] env[69784]: INFO nova.compute.manager [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Took 0.60 seconds to destroy the instance on the hypervisor. 
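Both spawn failures in this section end at the same step: the CopyVirtualDisk_Task submitted while caching the sparse image is polled by wait_for_task(), which translates the task error into a VimFaultException carrying the 'InvalidArgument' fault and the "fileType" message. The sketch below shows that call-and-wait shape only; it is not Nova's vm_util code, and session, dc_ref and the vmdk paths are placeholders.

from oslo_vmware import exceptions as vexc


def cache_sparse_image(session, dc_ref, src_vmdk, dst_vmdk):
    disk_manager = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_manager,
        sourceName=src_vmdk, sourceDatacenter=dc_ref,
        destName=dst_vmdk, destDatacenter=dc_ref)
    try:
        session.wait_for_task(task)      # raises once _poll_task sees the task error
    except vexc.VimFaultException as err:
        # err.fault_list holds the fault names, e.g. ['InvalidArgument'] as logged above
        print('copy failed (%s): %s' % (err.fault_list, err))
        raise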
[ 2489.076804] env[69784]: DEBUG nova.compute.claims [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Aborting claim: {{(pid=69784) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2489.076989] env[69784]: DEBUG oslo_concurrency.lockutils [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2489.077229] env[69784]: DEBUG oslo_concurrency.lockutils [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2489.090265] env[69784]: DEBUG nova.virt.vmwareapi.images [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] Downloading image file data a83f2316-67d7-4612-bb03-1146b6453ed4 to the data store datastore1 {{(pid=69784) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2489.139917] env[69784]: DEBUG oslo_vmware.rw_handles [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d0f1276c-4c3d-40c5-adc2-b51f5554c934/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69784) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2489.199986] env[69784]: DEBUG oslo_vmware.rw_handles [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Completed reading data from the image iterator. {{(pid=69784) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2489.200240] env[69784]: DEBUG oslo_vmware.rw_handles [None req-3b2186f0-204c-4be2-9d00-ebeaeb20c002 tempest-AttachVolumeNegativeTest-414797635 tempest-AttachVolumeNegativeTest-414797635-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d0f1276c-4c3d-40c5-adc2-b51f5554c934/a83f2316-67d7-4612-bb03-1146b6453ed4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69784) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2489.256758] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6413247-4d5f-4061-b13d-d78900ce0154 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2489.264244] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f5d0928-4569-486d-9890-266fedda9f43 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2489.294020] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d206292-b95d-4762-9932-890ac47822c2 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2489.300569] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c96795a-93ed-4188-8070-a119367ac11f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2489.312864] env[69784]: DEBUG nova.compute.provider_tree [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2489.321063] env[69784]: DEBUG nova.scheduler.client.report [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2489.334189] env[69784]: DEBUG oslo_concurrency.lockutils [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.257s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2489.334699] env[69784]: ERROR nova.compute.manager [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2489.334699] env[69784]: Faults: ['InvalidArgument'] [ 2489.334699] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Traceback (most recent call last): [ 2489.334699] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2489.334699] 
env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] self.driver.spawn(context, instance, image_meta, [ 2489.334699] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2489.334699] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2489.334699] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2489.334699] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] self._fetch_image_if_missing(context, vi) [ 2489.334699] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2489.334699] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] image_cache(vi, tmp_image_ds_loc) [ 2489.334699] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2489.335145] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] vm_util.copy_virtual_disk( [ 2489.335145] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2489.335145] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] session._wait_for_task(vmdk_copy_task) [ 2489.335145] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2489.335145] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] return self.wait_for_task(task_ref) [ 2489.335145] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2489.335145] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] return evt.wait() [ 2489.335145] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2489.335145] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] result = hub.switch() [ 2489.335145] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2489.335145] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] return self.greenlet.switch() [ 2489.335145] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2489.335145] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] self.f(*self.args, **self.kw) [ 2489.335542] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2489.335542] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] raise exceptions.translate_fault(task_info.error) [ 2489.335542] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2489.335542] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Faults: ['InvalidArgument'] [ 2489.335542] env[69784]: ERROR nova.compute.manager [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] [ 2489.335542] env[69784]: DEBUG nova.compute.utils [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] VimFaultException {{(pid=69784) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2489.336724] env[69784]: DEBUG nova.compute.manager [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Build of instance 0a177681-5f4e-4dc5-baee-1303be38444a was re-scheduled: A specified parameter was not correct: fileType [ 2489.336724] env[69784]: Faults: ['InvalidArgument'] {{(pid=69784) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2489.337153] env[69784]: DEBUG nova.compute.manager [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Unplugging VIFs for instance {{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2489.337330] env[69784]: DEBUG nova.compute.manager [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69784) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2489.337494] env[69784]: DEBUG nova.compute.manager [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2489.337653] env[69784]: DEBUG nova.network.neutron [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2489.633302] env[69784]: DEBUG nova.network.neutron [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2489.646378] env[69784]: INFO nova.compute.manager [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Took 0.31 seconds to deallocate network for instance. [ 2489.737693] env[69784]: INFO nova.scheduler.client.report [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Deleted allocations for instance 0a177681-5f4e-4dc5-baee-1303be38444a [ 2489.757893] env[69784]: DEBUG oslo_concurrency.lockutils [None req-30a0ea28-6656-4ea5-a928-b9d40b5ba6c1 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Lock "0a177681-5f4e-4dc5-baee-1303be38444a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 600.451s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2489.758785] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b7915d42-1e8a-47f0-b285-9dc7a3107188 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Lock "0a177681-5f4e-4dc5-baee-1303be38444a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 404.117s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2489.758785] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b7915d42-1e8a-47f0-b285-9dc7a3107188 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Acquiring lock "0a177681-5f4e-4dc5-baee-1303be38444a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2489.758785] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b7915d42-1e8a-47f0-b285-9dc7a3107188 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Lock "0a177681-5f4e-4dc5-baee-1303be38444a-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2489.759114] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b7915d42-1e8a-47f0-b285-9dc7a3107188 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Lock "0a177681-5f4e-4dc5-baee-1303be38444a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2489.760918] env[69784]: INFO nova.compute.manager [None req-b7915d42-1e8a-47f0-b285-9dc7a3107188 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Terminating instance [ 2489.763288] env[69784]: DEBUG nova.compute.manager [None req-b7915d42-1e8a-47f0-b285-9dc7a3107188 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Start destroying the instance on the hypervisor. {{(pid=69784) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2489.763482] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b7915d42-1e8a-47f0-b285-9dc7a3107188 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Destroying instance {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2489.764758] env[69784]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a8bdd3ac-0410-4c2e-bcf6-f742d34b0662 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2489.774523] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd87f6fc-41e6-4ca5-8df9-10c6f8994b22 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2489.805211] env[69784]: WARNING nova.virt.vmwareapi.vmops [None req-b7915d42-1e8a-47f0-b285-9dc7a3107188 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0a177681-5f4e-4dc5-baee-1303be38444a could not be found. [ 2489.805497] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b7915d42-1e8a-47f0-b285-9dc7a3107188 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Instance destroyed {{(pid=69784) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2489.805737] env[69784]: INFO nova.compute.manager [None req-b7915d42-1e8a-47f0-b285-9dc7a3107188 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 2489.806085] env[69784]: DEBUG oslo.service.loopingcall [None req-b7915d42-1e8a-47f0-b285-9dc7a3107188 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69784) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2489.806380] env[69784]: DEBUG nova.compute.manager [-] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Deallocating network for instance {{(pid=69784) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2489.806504] env[69784]: DEBUG nova.network.neutron [-] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] deallocate_for_instance() {{(pid=69784) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2489.829318] env[69784]: DEBUG nova.network.neutron [-] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Updating instance_info_cache with network_info: [] {{(pid=69784) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2489.837198] env[69784]: INFO nova.compute.manager [-] [instance: 0a177681-5f4e-4dc5-baee-1303be38444a] Took 0.03 seconds to deallocate network for instance. [ 2489.919658] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b7915d42-1e8a-47f0-b285-9dc7a3107188 tempest-ServerDiskConfigTestJSON-1319642046 tempest-ServerDiskConfigTestJSON-1319642046-project-member] Lock "0a177681-5f4e-4dc5-baee-1303be38444a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.161s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2495.667178] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2495.667178] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69784) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2495.840196] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2496.840936] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2498.839903] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2500.839400] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2501.839804] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2501.840205] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Starting heal instance info cache {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2501.840205] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Rebuilding the list of instances to heal {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2501.856452] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: e35ffffb-c0fd-4236-b489-80eb0fdb4e37] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2501.856600] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 86a94763-92c8-4689-b37a-3dc6c1ec744c] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2501.856733] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: e1969fb1-0f70-42c9-a362-f1efb6ee4619] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2501.856859] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 265a4f1d-8214-42ac-bbd0-4c3758ea6fed] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2501.857010] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 74fe4356-3f52-4f4c-8bad-b065c2c3ac0f] Skipping network cache update for instance because it is Building. 
{{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2501.857162] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] [instance: 077714dd-c77d-40d7-8546-e44eb00e24b3] Skipping network cache update for instance because it is Building. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2501.857288] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Didn't find any instances for network info cache update. {{(pid=69784) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2501.857789] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2502.839724] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2502.851454] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2502.851891] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2502.851891] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2502.852090] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69784) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2502.853087] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c340e0-ec2c-400d-9df7-63e334686422 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2502.862543] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c373d5fa-9b5d-428e-b619-7e70814c1ef3 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2502.876278] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ba2e8ab-daaa-4a2f-9ed4-99661a8cc0eb {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2502.882514] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-496b98dc-f954-4fc5-b1e4-2413f25d6663 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2502.910440] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180942MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=69784) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2502.910599] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2502.910759] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2502.966475] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance e35ffffb-c0fd-4236-b489-80eb0fdb4e37 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2502.966668] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 86a94763-92c8-4689-b37a-3dc6c1ec744c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2502.966801] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance e1969fb1-0f70-42c9-a362-f1efb6ee4619 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2502.966924] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 265a4f1d-8214-42ac-bbd0-4c3758ea6fed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2502.967078] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 74fe4356-3f52-4f4c-8bad-b065c2c3ac0f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2502.967213] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Instance 077714dd-c77d-40d7-8546-e44eb00e24b3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69784) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2502.967375] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2502.967512] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=69784) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2503.040231] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3d7883c-6f87-44ed-a34e-e4b00584519b {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2503.047442] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fce7b314-efbe-4668-b6bd-a3dec23c429c {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2503.076816] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a84e81b-3797-4eef-b195-3aa0c71159ed {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2503.083883] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea73e0ee-a5a6-487a-a3dc-516915de4693 {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2503.096492] env[69784]: DEBUG nova.compute.provider_tree [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed in ProviderTree for provider: dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 {{(pid=69784) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2503.104241] env[69784]: DEBUG nova.scheduler.client.report [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Inventory has not changed for provider dd1f4ab1-b8e3-4df8-9c6e-de86e55f70b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69784) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2503.119635] env[69784]: DEBUG nova.compute.resource_tracker [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69784) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2503.119814] 
env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.209s {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2503.125873] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._sync_power_states {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2503.139969] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Getting list of instances from cluster (obj){ [ 2503.139969] env[69784]: value = "domain-c8" [ 2503.139969] env[69784]: _type = "ClusterComputeResource" [ 2503.139969] env[69784]: } {{(pid=69784) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2503.141435] env[69784]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-415873a1-63c5-4147-93ff-a0be6308553f {{(pid=69784) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2503.154664] env[69784]: DEBUG nova.virt.vmwareapi.vmops [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Got total of 6 instances {{(pid=69784) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2503.154832] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Triggering sync for uuid e35ffffb-c0fd-4236-b489-80eb0fdb4e37 {{(pid=69784) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 2503.155058] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Triggering sync for uuid 86a94763-92c8-4689-b37a-3dc6c1ec744c {{(pid=69784) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 2503.155244] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Triggering sync for uuid e1969fb1-0f70-42c9-a362-f1efb6ee4619 {{(pid=69784) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 2503.155399] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Triggering sync for uuid 265a4f1d-8214-42ac-bbd0-4c3758ea6fed {{(pid=69784) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 2503.155552] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Triggering sync for uuid 74fe4356-3f52-4f4c-8bad-b065c2c3ac0f {{(pid=69784) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 2503.155721] env[69784]: DEBUG nova.compute.manager [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Triggering sync for uuid 077714dd-c77d-40d7-8546-e44eb00e24b3 {{(pid=69784) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 2503.156050] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "e35ffffb-c0fd-4236-b489-80eb0fdb4e37" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2503.156318] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock 
"86a94763-92c8-4689-b37a-3dc6c1ec744c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2503.156562] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "e1969fb1-0f70-42c9-a362-f1efb6ee4619" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2503.156769] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "265a4f1d-8214-42ac-bbd0-4c3758ea6fed" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2503.156966] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "74fe4356-3f52-4f4c-8bad-b065c2c3ac0f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2503.157245] env[69784]: DEBUG oslo_concurrency.lockutils [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Acquiring lock "077714dd-c77d-40d7-8546-e44eb00e24b3" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69784) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2503.865920] env[69784]: DEBUG oslo_service.periodic_task [None req-b9658bd4-377f-4808-af12-81ad07d8d443 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69784) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}